AudioBufferSourceNode not looping when duration is set

I've been playing around with the JS Web Audio API.
The thing I'm trying to achieve is to play a piece of a track in a loop.
There's no problem playing the whole track in a loop, but if I define a duration then it doesn't loop anymore... I guess what I need is more like a marker than a duration... Is there a way to do this?
const audioCtx = new AudioContext();
const srcUrl = 'https://freesound.org/data/previews/251/251248_1137749-lq.mp3';
let srcArrayBuffer;
let playingTrack;

async function loadSrcAudioFile(url) {
  const response = await fetch(url);
  const buffer = await response.arrayBuffer();
  return buffer;
}

function loop() {
  playingTrack = audioCtx.createBufferSource();
  playingTrack.connect(audioCtx.destination);
  playingTrack.buffer = srcArrayBuffer;
  playingTrack.loop = true;
  //playingTrack.loopStart = 0;
  //playingTrack.loopEnd = 1.5;
  playingTrack.start(audioCtx.currentTime, 0, 1.5);
}

function stop() {
  playingTrack.stop();
  playingTrack = null;
}

async function play() {
  if (!srcArrayBuffer) {
    const buffer = await loadSrcAudioFile(srcUrl);
    srcArrayBuffer = await audioCtx.decodeAudioData(buffer);
  }
  loop();
}

document.getElementById('playBtn').addEventListener('click', play);
document.getElementById('stopBtn').addEventListener('click', stop);

You're almost there. Setting the duration means "play this for this long": playback stops after 1.5 seconds no matter what, whether or not the buffer is looping.
Setting loopStart and loopEnd without specifying the duration will do the trick.
https://jsfiddle.net/4yeavmp3/
If, for example, you want it to loop exactly 4 times, you could set the duration to 6 (4 × 1.5 seconds).
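For reference, a minimal sketch of the adjusted loop(), using the loopStart/loopEnd properties that are commented out in the snippet above and dropping the duration argument; the node then keeps looping until stop() is called:

function loop() {
  playingTrack = audioCtx.createBufferSource();
  playingTrack.buffer = srcArrayBuffer;
  playingTrack.connect(audioCtx.destination);
  playingTrack.loop = true;
  playingTrack.loopStart = 0;   // loop region start, in seconds
  playingTrack.loopEnd = 1.5;   // loop region end, in seconds
  // no duration argument: playback loops over [loopStart, loopEnd) until stop()
  playingTrack.start(audioCtx.currentTime, 0);
}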

Related

Flutter - how do I get the duration of a picked video file on Web?

I've tried everything and failed to determine the duration of a picked video file on Flutter Web. All libraries on pub.dev need a 'File' and this is not available on the web.
I've not been able to get this from the metadata either.
What worked for me, though I am unhappy with the solution approach, is:
Widget buildHiddenVideoPlayer(Key key) {
  var _videoElement = html.VideoElement();
  _videoElement.id = 'htmlHiddenVideoID';
  _videoElement.autoplay = true;
  _videoElement.muted = true;
  _videoElement.loop = true;
  _videoElement.controls = false;
  _videoElement.defaultMuted = true;

  if (fileData != null) {
    _videoElement.src = xfile.path; // where xfile is the file picked as XFile
  }

  // ignore: undefined_prefixed_name
  ui.platformViewRegistry.registerViewFactory(
    'htmlHiddenVideoID',
    (int viewId) => _videoElement,
  );

  return HtmlElementView(
    key: key,
    viewType: 'htmlHiddenVideoID',
  );
}
This widget is hidden in a 5 x 5 sized box behind an iFrame widget (in my implementation).
Then when I need the duration of a picked file:
VideoElement element = document.getElementById('htmlHiddenVideoID') as VideoElement;
setState(() {
  element.src = pickedFile.path;
});
while (true) {
  await Future.delayed(const Duration(milliseconds: 200), () {});
  duration = element.duration;
  if (!duration.isNaN) break; // duration is not returned immediately in many cases
}
element.pause(); // stops the hidden video from playing and consuming resources
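For context, the Dart VideoElement above wraps the browser's HTML video element, so the duration becomes readable once the element has loaded its metadata. A minimal plain-JS sketch of that underlying mechanism (the object URL variable is hypothetical), listening for the loadedmetadata event instead of polling:

const video = document.createElement('video');
video.preload = 'metadata';             // only metadata is needed, not the whole file
video.src = pickedFileObjectUrl;        // hypothetical object URL for the picked file
video.addEventListener('loadedmetadata', () => {
  console.log('duration in seconds:', video.duration);
});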

Reusing a function for buttons that work independently of each other?

I'm trying to create something with P5.js that resembles an audio looper: you record a snippet of audio, it plays back to you as a loop, and then you can record other snippets of audio to play together to create a song.
I figured out how to record audio, play it back as a loop, and stop the loop, but I'm not sure of the best way to repeat that function so it can be used for buttons that work independently of each other and record separate audio files, as I would like to record multiple loops.
I'm still pretty new to P5.js, so there's probably a simple way to do this; any ideas would help! In general, if you have any ideas on how to achieve this project as a whole (the audio looper), I'd love to hear them.
This is my code:
let mic, recorder, soundFile, button;
let state = 0;

function setup() {
  createCanvas(windowWidth, windowHeight);
  background(200);

  mic = new p5.AudioIn();
  mic.start();
  recorder = new p5.SoundRecorder();
  recorder.setInput(mic);
  soundFile = new p5.SoundFile();

  button = createButton("record");
  button.size(200, 100);
  button.style("font-family", "Bodoni");
  button.style("font-size", "48px");
  button.position(10, 10, 10);
  button.mouseClicked(loopRecord);
}
// this is the looper
function loopRecord() {
  if (state === 0 && mic.enabled) {
    recorder.record(soundFile);
    background(255, 0, 0);
    state++;
  } else if (state === 1) {
    recorder.stop();
    background(0, 255, 0);
    state++;
  } else if (state === 2) {
    soundFile.loop();
    state++;
  } else if (state === 3) {
    soundFile.stop();
    state++;
  } else if (state === 4) {
    state = 0; // assignment (not comparison) so the cycle can restart
  }
}
You should structure your data so that each button has its own state (and other parameters that should not be global, but instead unique to each loop).
Here is an example using an array of track objects:
https://editor.p5js.org/RemyDekor/sketches/gM75vBYmk
I'll copy the code here as well
let mic;
// this array will be filled afterwards
let tracksArray = [];

function setup() {
  createCanvas(400, 400);
  background(200);

  // we need only one mic (it is still global)
  mic = new p5.AudioIn();
  mic.start();

  // we call a function that creates a custom "track" object and puts it in the
  // tracksArray (which you could use for some other things, but is currently un-used)
  createTrack();

  // we create a button that calls the same function when we click on it
  const addTrackButton = createButton("add track");
  addTrackButton.mouseClicked(createTrack);
  addTrackButton.position(10, 10);
}

// We now use names (strings) instead of abstract numbers to set the state of the track
function handleTrackButtonClick(trackObject) {
  if (trackObject.state === "idle" && mic.enabled) {
    trackObject.recorder.record(trackObject.soundFile);
    background(255, 0, 0);
    trackObject.state = "recording";
    console.dir(trackObject.button.elt.innerText = "[recording..]");
  } else if (trackObject.state === "recording") {
    trackObject.recorder.stop();
    background(0, 255, 0);
    trackObject.state = "stopped";
    console.dir(trackObject.button.elt.innerText = "play");
  } else if (trackObject.state === "stopped") {
    trackObject.soundFile.loop();
    trackObject.state = "playing";
    console.dir(trackObject.button.elt.innerText = "[playing..] stop");
  } else if (trackObject.state === "playing") {
    trackObject.soundFile.stop();
    trackObject.state = "stopped";
    console.dir(trackObject.button.elt.innerText = "[stopped..] play");
  }
}

function createTrack() {
  const newButton = createButton("record");
  newButton.style("font-family", "Bodoni");
  newButton.style("font-size", "48px");
  // we do not position this button anymore, and use the "flow" of the html document
  newButton.style("display", "block");

  // this is the "track" object we create, and we attach the previously global
  // parameters/states to it
  const newTrackObject = {
    button: newButton,
    state: "idle",
    recorder: new p5.SoundRecorder(),
    soundFile: new p5.SoundFile()
  };

  newButton.mouseClicked(function() {
    handleTrackButtonClick(newTrackObject);
  });

  newTrackObject.recorder.setInput(mic);
  tracksArray.push(newTrackObject);
}

Shooting GameObjects in Vuforia

Hi, actually I want to throw GameObjects like shooting bullets, but it does not work. It throws unlimited GameObjects without any wait. Here is my script on the ARCamera.
//Script starts
var prefabBullet : Transform;
var speed : float;
var gameObjects : GameObject[];

function Update () {
  Invoke("Shoot", 2.0f);
}

function Shoot () {
  yield WaitForSeconds(3);
  var instanceBullet = Instantiate(prefabBullet, transform.position, Quaternion.identity);
  instanceBullet.rigidbody.AddForce(transform.forward * speed);
  SomeFunction();
}

function SomeFunction() {
  gameObjects = GameObject.FindGameObjectsWithTag("Player");
  for (var i = 0; i < gameObjects.length; i++)
    Destroy(gameObjects[i]);
}
I want to throw GameObjects with some time elapsing between them.
function Update () { Invoke("Shoot", 2.0f); }
function Shoot () { yield WaitForSeconds(3);
All you are doing here is putting a delay on the call (2 seconds from Invoke plus 3 from WaitForSeconds), but the call is still being scheduled every Update frame, so you get a delay before the first shot and then one every frame after that.
Set up a boolean to control when the shooting happens:
var fire : bool = true;

function Update()
{
  if (fire)
  {
    Shoot();
    fire = false;
  }
}

function Shoot()
{
  yield WaitForSeconds(3);
  // do your projectile stuff
  fire = true;
}

How to play overlapping audio in winrt?

I'm porting an app from wp8 that requires playback of various sounds that can overlap. The only way I've found so far is to use MediaElement, but this doesn't allow overlapping sounds.
QUESTION - what is the easiest and best audio engine to use to play overlapping audio? Ideally I need a small example of how I can do this.
I've looked into WASAPI (http://code.msdn.microsoft.com/windowsapps/Windows-Audio-Session-22dcab6b), but it doesn't look like it supports simple playback?
Maybe I can wrap the MediaFoundation and call it from winrt? (MediaEngine audio playback on WinRT)
Here is my code now, but when I play a new sound it cuts off the previously playing one rather than blending them.
ThreadUtility.runOnUiThread(
  async delegate()
  {
    // TODO doesn't allow sounds to overlap!
    Uri uri = new Uri(R.base_uri, R.raw.URI_PREFIX + resourceId);
    StorageFile storageFile =
      await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(uri);

    MediaElement element = new MediaElement();
    var randomAccessStream = await storageFile.OpenReadAsync();
    element.SetSource(randomAccessStream, storageFile.ContentType);
    element.Volume = volume;
    element.PlaybackRate = pitch;
    //TODO element.Pan = pan;
    element.Play();
  }
);
SOLUTION (as per Filip's answer):
in the page class:
var mediaElements = new LinkedList<MediaElement>();
{
  for (int channel = 0; channel < TeacherSoundGroover.NUM_CHANNELS; channel++)
  {
    var mediaElement = new MediaElement();
    mediaElements.add(mediaElement);

    // Must be in the tree otherwise it won't overlap!
    m_titlePanel.Children.Add(mediaElement);
  }
}
m_soundPlayer = new MySoundPlayer(mediaElements);
in the MySoundPlayer class:
ThreadUtility.runOnUiThread(
  async delegate()
  {
    Uri uri = new Uri(R.base_uri, R.raw.URI_PREFIX + resourceId);
    StorageFile storageFile =
      await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(uri);

    if (m_mediaElements != null)
    {
      int count = m_mediaElements.size();
      if (count > 0)
      {
        int channel = m_nextMediaElementToUse % count;
        m_nextMediaElementToUse++;
        MediaElement element = m_mediaElements.get(channel);
        var randomAccessStream = await storageFile.OpenReadAsync();
        element.Stop();
        element.DefaultPlaybackRate = rate;
        element.SetSource(randomAccessStream, storageFile.ContentType);
        element.Volume = volume;
        element.Balance = pan;
        element.Play();
      }
    }
  }
);
The easiest thing to do is to use multiple MediaElement controls, though that might not give you the desired results. The best way is to use XAudio2, either directly or through SharpDX if you want to avoid creating a C++/CX WinRT component.

Why doesn't this program output any data?

I'm messing with the Node.js 0.10 Stream classes to try and figure out how to use them. I'm not sure why this experiment isn't working. It's supposed to output letters of the alphabet to an HTTP response object, but does not. I've annotated the source with a few comments.
Thank you!
var Readable = require('stream').Readable
  , inherits = require('util').inherits
  , http = require('http');

/**
 * A stream that streams the English alphabet
 */
function AlphabetStream() {
  Readable.call(this);
  this.code = this.offset = 'a'.charCodeAt(0);
  this.last = 'z'.charCodeAt(0);
}
inherits(AlphabetStream, Readable);

AlphabetStream.prototype._read = function(size) {
  for (var i = 0; i < size; i++)
    this.push(this.next_char());
  this.push(null);
};

AlphabetStream.prototype.next_char = function() {
  var cycle = this.last + 1;
  return String.fromCharCode((++this.code % cycle) + this.offset);
};

/**
 * An HTTP server, prints the first n letters of the English alphabet
 */
var server = http.createServer(function(req, res) {
  // $ curl localhost:3001/?size=11
  var size = require('url').parse(req.url, true).query.size;
  if (size) {
    var rs = new AlphabetStream;
    rs.pipe(res);            // This calls `_read()` with `size` 16kb
    rs.read(parseInt(size)); // This also calls `_read()` with `size` 16kb
  }
  res.end(''); // Nothing gets printed, despite the pipe and the reading.
});

server.listen(3001, function() {
  console.log('Listening on 3001');
});
Have a look at this piece of code:
if (size) {
  var rs = new AlphabetStream;
  rs.pipe(res);            // This calls `_read()` with `size` 16kb
  rs.read(parseInt(size)); // This also calls `_read()` with `size` 16kb
}
res.end(''); // Nothing gets printed, despite the pipe and the reading.
You end the response (last line) before the actual piping can take place (this happens because .pipe is asynchronous). What you should do is something like this:
if (size) {
  var rs = new AlphabetStream;
  rs.pipe(res);
  rs.read(parseInt(size));
} else {
  // NOTE THE ELSE STATEMENT
  res.end('');
}
The .pipe function will take care of ending the destination stream (i.e. the response) unless explicitly stated otherwise; see the docs:
http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
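For completeness, a minimal sketch of opting out of that behaviour via the { end: false } option, in which case ending the response becomes your job again:

rs.pipe(res, { end: false }); // keep the response open after the readable ends
rs.on('end', function() {
  res.end();                  // now you have to end the response yourself
});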
EDIT: As for why 16kb? Well, I had to do some tests and it seems that this is the default behaviour of .pipe (and I'm not sure how to change that, to be honest). First of all note that this line:
rs.read(parseInt(size));
is totally useless (you can remove it); .pipe will take care of reading the data. Now the default behaviour is to read chunks of 16kb of data. So, in order to do what you are trying to do, you should probably pass size to the constructor of AlphabetStream, like this:
function AlphabetStream(size) {
  Readable.call(this);
  this.code = this.offset = 'a'.charCodeAt(0);
  this.last = 'z'.charCodeAt(0);
  this.size = size; // <--- store size here
}
inherits(AlphabetStream, Readable);

AlphabetStream.prototype._read = function(size) {
  // this allows the stream to be a true stream:
  // it reads only as much data as it can,
  // but .read can be called multiple times without issues
  // with a predefined limit
  var chunk = Math.min(size, this.size);
  this.size -= chunk;
  for (var i = 0; i < chunk; i++) {
    this.push(this.next_char());
  }
  if (!this.size) {
    // end the stream only when the limit is reached
    this.push(null);
  }
};
After all, a stream should not depend on how much data you read. Then you do:
if (size) {
  var rs = new AlphabetStream(parseInt(size));
  rs.pipe(res);
} else {
  res.end('');
}
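Incidentally, the 16kb figure appears to correspond to the readable stream's default highWaterMark, which is roughly what .pipe asks for on each _read call. If you did want to change it, a hedged sketch (the value 64 is arbitrary) would be to pass options along to the Readable constructor:

function AlphabetStream(size) {
  // smaller highWaterMark: _read should be asked for at most 64 bytes at a time
  Readable.call(this, { highWaterMark: 64 });
  this.code = this.offset = 'a'.charCodeAt(0);
  this.last = 'z'.charCodeAt(0);
  this.size = size;
}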
