Recording result of audio processed from a buffer and placing it in an audio element - audio

I have a JavaScript file that takes an audio file, places it in a buffer and then allows the audio in the buffer to be processed (by changing the speed and loop duration).
I wish to record the resulting audio, including the manipulation, and place it in an <audio> element. I have tried to do this by integrating the code given in the first answer here:
Record sound of a webaudio api's audio context
with my JavaScript file. However, I get the following errors when I press the play button (which should also start the recording):
GET http://localhost:8000/[object%20AudioBufferSourceNode] [HTTP/1.0 404 File not found 1ms]
HTTP load failed with status 404. Load of media resource http://localhost:8000/[object%20AudioBufferSourceNode] failed. index.html
Uncaught (in promise) DOMException: The media resource indicated by the src attribute or assigned media provider object was not suitable.
I believe I am not hooking up the media recorder correctly. Can anybody assist?
Thank you,
My html is:
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<meta name="viewport" content="width=device-width">
<title>decodeAudioData example</title>
<link rel="stylesheet" href="">
<!--[if lt IE 9]>
<script src="//html5shiv.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
</head>
<body>
<h1>decodeAudioData example</h1>
<button class="play">Play</button>
<button class="stop">Stop</button>
<h2>Set playback rate</h2>
<input class="playback-rate-control" type="range" min="0.25" max="3" step="0.05" value="1">
<span class="playback-rate-value">1.0</span>
<h2>Set loop start and loop end</h2>
<input class="loopstart-control" type="range" min="0" max="20" step="1" value="0">
<span class="loopstart-value">0</span>
<input class="loopend-control" type="range" min="0" max="20" step="1" value="0">
<span class="loopend-value">0</span>
<br>
<br>
<!--Audio recoring destination element-->
<audio id='recording' controls='true'></audio>
</body>
</html>
My JavaScript is:
let audioCtx;
let source;
let songLength;
const pre = document.querySelector('pre');
const myScript = document.querySelector('script');
const play = document.querySelector('.play');
const stop = document.querySelector('.stop');
const playbackControl = document.querySelector('.playback-rate-control');
const playbackValue = document.querySelector('.playback-rate-value');
playbackControl.setAttribute('disabled', 'disabled');
const loopstartControl = document.querySelector('.loopstart-control');
const loopstartValue = document.querySelector('.loopstart-value');
loopstartControl.setAttribute('disabled', 'disabled');
const loopendControl = document.querySelector('.loopend-control');
const loopendValue = document.querySelector('.loopend-value');
loopendControl.setAttribute('disabled', 'disabled');
// Recording variables
var recorder=false;
var recordingstream=false;
// use XHR to load an audio track, and
// decodeAudioData to decode it and stick it in a buffer.
// Then we put the buffer into the source
function getData() {
/*if(window.webkitAudioContext) {
audioCtx = new window.webkitAudioContext();
} else {
audioCtx = new window.AudioContext();
}*/
audioCtx = new window.AudioContext();
source = audioCtx.createBufferSource();
request = new XMLHttpRequest();
request.open('GET', 'precky.mp3', true);
request.responseType = 'arraybuffer';
request.onload = function() {
let audioData = request.response;
audioCtx.decodeAudioData(audioData, function(buffer) {
myBuffer = buffer;
songLength = buffer.duration;
source.buffer = myBuffer;
source.playbackRate.value = playbackControl.value;
source.connect(audioCtx.destination);
source.loop = true;
loopstartControl.setAttribute('max', Math.floor(songLength));
loopendControl.setAttribute('max', Math.floor(songLength));
},
function(e){"Error with decoding audio data" + e.error});
}
request.send();
}
function startrecording(){
recordingstream=audioCtx.createMediaStreamDestination();
recorder=new MediaRecorder(recordingstream.stream);
recorder.start();
}
function glue(){
let a=new Audio(source);
let mediasource=audioCtx.createMediaElementSource(a);
mediasource.connect(recordingstream);//connects also to MediaRecorder
a.play();
}
function stoprecording(){
recorder.addEventListener('dataavailable',function(e){
document.querySelector('#recording').src=URL.createObjectURL(e.data);
recorder=false;
recordingstream=false;
});
recorder.stop();
}
// wire up buttons to stop and play audio, and range slider control
play.onclick = function() {
getData();
source.start(0);
play.setAttribute('disabled', 'disabled');
playbackControl.removeAttribute('disabled');
loopstartControl.removeAttribute('disabled');
loopendControl.removeAttribute('disabled');
startrecording();
glue(source);
}
stop.onclick = function() {
source.stop(0);
play.removeAttribute('disabled');
playbackControl.setAttribute('disabled', 'disabled');
loopstartControl.setAttribute('disabled', 'disabled');
loopendControl.setAttribute('disabled', 'disabled');
stoprecording();
}
playbackControl.oninput = function() {
source.playbackRate.value = playbackControl.value;
playbackValue.innerHTML = playbackControl.value;
}
loopstartControl.oninput = function() {
source.loopStart = loopstartControl.value;
loopstartValue.innerHTML = loopstartControl.value;
}
loopendControl.oninput = function() {
source.loopEnd = loopendControl.value;
loopendValue.innerHTML = loopendControl.value;
}
The original decodeAudioData example (without my attempt to record) comes from Mozilla docs here:
https://mdn.github.io/webaudio-examples/decode-audio-data/ by chrisdavidmills

Related

How to autoplay enable for my livestream with voice

** Please help me for autoplay live stream with voice. There is muted autoplay.
please correct me where i am doing mistake. **
add video library
<script src="https://cdn.jsdelivr.net/npm/hls.js#latest"></script>
video add in your page
<video
id="my-player"
controls
autoplay muted loop
/>
script for videoplayer
<script>
const video = document.querySelector('#my-player');
const src = 'https://stream.mux.com/yyFGolovbOWRmtsF4eG01SVFgl6VsyGMH4i7dxUVsjxo.m3u8';
if (video.canPlayType('application/vnd.apple.mpegurl')) {
// Some browers (safari and ie edge) support HLS natively
video.src = src;
} else if (Hls.isSupported()) {
const hls = new Hls();
hls.loadSource(src)
hls.attachMedia(video);
}
</script>
Please correct me where i am doing mistake
Try this, load the source after the media is attached to the video element.
<script>
const video = document.querySelector('#my-player');
const src = 'https://stream.mux.com/yyFGolovbOWRmtsF4eG01SVFgl6VsyGMH4i7dxUVsjxo.m3u8';
if (video.canPlayType('application/vnd.apple.mpegurl')) {
// Some browers (safari and ie edge) support HLS natively
video.src = src;
} else if (Hls.isSupported()) {
const hls = new Hls();
//////hls.loadSource(src) // Remove this line
hls.attachMedia(video);
hls.on(Hls.Events.MEDIA_ATTACHED, function() {
// fired when MediaSource has been succesfully attached to media element
// Now load the source m3u8
try {
hls.loadSource(src);
} catch (err) {
console.log('Error Loading Source Media!!! ' + err);
}
});
}
</script>

Web Audio does not work on some devices (Panner Audio)

I am trying to add all my page sounds to web audio panner such that the sounds move from left to right. For this I created this function to add all of them to pannner but it seems to work only on some devices and on other devices no sound comes from audio even when it is showing the speaker icon in tab( audio is playing).
let panNode; var source=new Array(60);
const myAudio = document.querySelectorAll('audio');
const panControl = document.querySelector('.panning-control');
var AudioContext = window.AudioContext || window.webkitAudioContext;
const audioCtx = new AudioContext();
panNode = audioCtx.createStereoPanner();
for(let i = 0; i<myAudio.length; i++){
source[i] = audioCtx.createMediaElementSource(myAudio[i]);
source[i].connect(panNode);
panNode.connect(audioCtx.destination);
}
Ignore how I manage to move audio as I have created a separate function to automatically increase or decrease pannode value every second. I need help in adding all sounds to pannode compatible with all devices.
Unfortunately createStereoPanner() isn't available in Safari. It will probably be part of the next release though. It's already available if you switch on the "Modern WebAudio API".
For now you would need to use a polyfill. standardized-audio-context for example has an implementation of the StereoPannerNode which works in Safari. There is also the stereo-panner-node package which is a standalone polyfill. But that is not maintained any longer.
Here is a little example which uses standardized-audio-context:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
</head>
<body>
<audio crossOrigin="anonymous" src="https://mp3l.jamendo.com/?trackid=1630628&format=mp31&from=app-devsite"></audio>
<button id="play">play</button>
<button disabled id="stop">stop</button>
<input id="left" type="radio" name="panning" value="left"><label for="left">left</label>
<input id="center" type="radio" name="panning" value="center" checked><label for="center">center</label>
<input id="right" type="radio" name="panning" value="right"><label for="right">right</label>
<script type="module">
import { AudioContext } from 'https://jspm.dev/standardized-audio-context';
const audioContext = new AudioContext();
const stereoPanner = audioContext.createStereoPanner();
const audioElement = document.querySelector('audio');
const playButton = document.getElementById('play');
const stopButton = document.getElementById('stop');
const leftInput = document.getElementById('left');
const centerInput = document.getElementById('center');
const rightInput = document.getElementById('right');
const mediaElementAudioSourceNode = audioContext.createMediaElementSource(audioElement);
mediaElementAudioSourceNode
.connect(stereoPanner)
.connect(audioContext.destination);
playButton.addEventListener('click', () => {
stereoPanner.pan.value = leftInput.checked
? -1
: centerInput.checked
? 0
: 1;
audioContext.resume();
audioElement.play();
playButton.disabled = true;
stopButton.disabled = false;
leftInput.disabled = true;
centerInput.disabled = true;
rightInput.disabled = true;
});
stopButton.addEventListener('click', () => {
audioElement.pause();
audioElement.currentTime = 0;
playButton.disabled = false;
stopButton.disabled = true;
leftInput.disabled = false;
centerInput.disabled = false;
rightInput.disabled = false;
});
</script>
</body>
</html>

D3 in vscode webviews, can't see anything

I am trying to do a hello world example with d3 just to see if I can use the library. To this effect I copied this ultraminimialistic example into vscodes webview example.
This gave me the following code:
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const vscode = require("vscode");
const d3 = require("d3");
function activate(context) {
// Create and show a new webview
const panel = vscode.window.createWebviewPanel('catCoding', // Identifies the type of the webview. Used internally
'Cat Coding', // Title of the panel displayed to the user
vscode.ViewColumn.One, // Editor column to show the new webview panel in.
{} // Webview options. More on these later.
);
panel.webview.html = getWebviewContent();
d3.select(panel.document).append("span").text("Hello, world!");
context.subscriptions.push(vscode.commands.registerCommand('catCoding.start', () => {
}));
}
exports.activate = activate;
function getWebviewContent() {
return `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Cat Coding</title>
</head>
<body>
<script src="//d3js.org/d3.v3.min.js"></script>
<script>
d3.select("body").append("span")
.text("Hello, world!");
</script>
</body>
</html>`;
}
// this method is called when your extension is deactivated
function deactivate() { }
exports.deactivate = deactivate;
//# sourceMappingURL=extension.js.map
My expectation was to see a hello world just like int he example, but I see this instead:
There are no alert messages, the problems, output and debug consoles are all fine and even the chrome dev console reports no errors. I am stuck.
There's 2 different attempts at using d3 in here, they both fail, one uses JS directly, the other uses HTML.

jsdom + canvas on node.js: toDataURL() error

Using canvas#1.2.3 & jsdom#3.1.2 with node v0.12.2, I'm getting an error while trying to use the canvas toDataURL() function.
canvasTest.js:
$(function(){
var canvas = $('<canvas></canvas>').attr({'id':'canvasTest', 'width':'500', 'height':'500'});
var ctx=canvas[0].getContext("2d");
ctx.beginPath();
ctx.arc(100,75,50,0,2*Math.PI);
ctx.stroke();
$('#canvasWrap').append(canvas);
});
HTML Test:
<!doctype html>
<html>
<head>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script>
<script src="canvasTest.js"></script>
<script type="text/javascript">
$(function(){
console.log($('body').html());
console.log($('#canvasTest').length);
console.log($('#canvasTest')[0].toDataURL());
})
</script>
</head>
<body>
<div id="canvasWrap"></div>
</body>
</html>
jsdom Test:
var canvas = require('canvas');
var jsdom = require('jsdom');
jsdom.env({
html: '<html><body><div id="canvasWrap"></div></body></html>',
scripts: ['127.0.0.1/jquery-2.1.4.min.js','127.0.0.1/canvasTest.js'],
done:function (err,win) {
var $ = win.jQuery;
$(function(){
console.log($('body').html());
console.log($('#canvasTest').length);
console.log($('#canvasTest')[0].toDataURL());
});
}
});
On my HTML Test in Chrome I get the correct base64-encoded canvas data, while in node.js, the error reads:
TypeError: Cannot read property 'toDataURL' of undefined
You are selecting by ID, so there is no array. Drop the [0] and it should work, or select it another way, with an array:
console.log($('canvas')[0].toDataURL());
or try this:
console.log($('#canvasTest').toDataURL());
or this:
console.log(win.$("#canvasTest").toDataURL());
or this:
console.log(win.document.getElementById("canvasTest").toDataURL());
or this:
console.log(win.document.getElementsByTagName("canvas")[0].toDataURL());

webcam displayed on LAN not to the internet

OK, this is not due to a dumb mistake: I have forwarded the port and there is no firewall in between.
I try to make the "live streaming" section of jsmpeg (https://github.com/phoboslab/jsmpeg) to work on the Internet. To get the webcam stream on my webpage I modify "stream-server.js"
if( process.argv.length < 3 ) {
console.log(
'Usage: \n' +
'node stream-server.js <secret> [<stream-port> <websocket-port>]'
);
process.exit();
}
var STREAM_SECRET = process.argv[2],
STREAM_PORT = process.argv[3] || 8082,
WEBSOCKET_PORT = process.argv[4] || 8084,
STREAM_MAGIC_BYTES = 'jsmp'; // Must be 4 bytes
var width = 320,
height = 240;
// Websocket Server
var socketServer = new (require('ws').Server)({port: WEBSOCKET_PORT});
socketServer.on('connection', function(socket) {
// Send magic bytes and video size to the newly connected socket
// struct { char magic[4]; unsigned short width, height;}
var streamHeader = new Buffer(8);
streamHeader.write(STREAM_MAGIC_BYTES);
streamHeader.writeUInt16BE(width, 4);
streamHeader.writeUInt16BE(height, 6);
socket.send(streamHeader, {binary:true});
console.log( 'New WebSocket Connection ('+socketServer.clients.length+' total)' );
socket.on('close', function(code, message){
console.log( 'Disconnected WebSocket ('+socketServer.clients.length+' total)' );
});
});
socketServer.broadcast = function(data, opts) {
for( var i in this.clients ) {
if (this.clients[i].readyState == 1) {
this.clients[i].send(data, opts);
}
else {
console.log( 'Error: Client ('+i+') not connected.' );
}
}
};
// HTTP Server to accept incomming MPEG Stream
var streamServer = require('http').createServer( function(request, response) {
var params = request.url.substr(1).split('/');
if( params[0] == STREAM_SECRET ) {
width = (params[1] || 320)|0;
height = (params[2] || 240)|0;
console.log(
'Stream Connected: ' + request.socket.remoteAddress +
':' + request.socket.remotePort + ' size: ' + width + 'x' + height
);
request.on('data', function(data){
socketServer.broadcast(data, {binary:true});
});
}
else {
console.log(
'Failed Stream Connection: '+ request.socket.remoteAddress +
request.socket.remotePort + ' - wrong secret.'
);
response.end();
}
}).listen(STREAM_PORT);
console.log('Listening for MPEG Stream on http://127.0.0.1:'+STREAM_PORT+'/<secret>/<width>/<height>');
console.log('Awaiting WebSocket connections on ws://127.0.0.1:'+WEBSOCKET_PORT+'/');
var servi = require('servi'), // include the servi library
app = new servi(false); // servi instance
// configure the server's behavior:
app.port(8080); // port number to run the server on
app.serveFiles("public"); // serve all static HTML files from /public
app.start();
console.log("Listening for new clients on port 8080");
<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=320, initial-scale=1"/>
<title>jsmpeg streaming webcam</title>
<style type="text/css">
body {
background: white;
text-align: center;
margin-top: 10%;
}
#videoCanvas {
/* Always stretch the canvas to 640x480, regardless of its
internal size. */
width: 640px;
height: 480px;
}
</style>
</head>
<body>
<h1>
The Canvas size specified
</h1>
<!-- The Canvas size specified here is the "initial" internal resolution. jsmpeg will
change this internal resolution to whatever the source provides. The size the
canvas is displayed on the website is dictated by the CSS style.
-->
<canvas id="videoCanvas" width="640" height="480">
<p>
Please use a browser that supports the Canvas Element, like
Chrome,
Firefox,
Safari or Internet Explorer 10
</p>
</canvas>
<script type="text/javascript" src="jsmpg.js"></script>
<script type="text/javascript">
// Show loading notice
var canvas = document.getElementById('videoCanvas');
var ctx = canvas.getContext('2d');
ctx.fillStyle = '#444';
ctx.fillText('Loading...', canvas.width/2-30, canvas.height/3);
// Setup the WebSocket connection and start the player
var client = new WebSocket( 'ws://192.168.1.15:8084/' );
var player = new jsmpeg(client, {canvas:canvas});
</script>
</body>
</html>
The "index.html" is included in a "public" folder.
Then I start ffmpeg with:
`ffmpeg -s 640x480 -f video4linux2 -i /dev/video0 -f mpeg1video -b:v 800k -r 30 http://192.168.1.15:8082/1693/640/480/`
When I open, on any station of the lan, url 192.168.1.x:8080 I get the webcam streaming on my page "jsmpeg streaming webcam"; but if use the public ip 41.142.x.y:8080 from outside the local network (using 3G) I get the page (with the h1 line "The Canvas size specified") but no video it just says in the frame "loading".
I do not understand why it works locally and not globally. Why the page is streaming on the internet but not the webcam?
Thanks for your help!
in the client side, you should change this:
var client = 'ws://'+document.location.hostname+':8084/';
var player = new JSMpeg.Player(client, {canvas: canvas});
The way you were doing this was with the static ip 192.168.1.15. When it gets to the client, he doesn't know who this ip is.
I have not tested it, but I'm pretty sure the NAT will translate the ip addresses...

Resources