Unity3D: nothing but distortion when playing audio clips

I'm having a lot of trouble playing audio clips. I suspect it has something to do with Update() restarting my clip from the beginning every time it executes the block of code that hands the clip to the audio source. Below is a 'solution', but the fact that it works doesn't even make sense to me. If I just put the audio.Play() call in the main if statement, without checking the counter against the audio clip's length, I hear nothing but a quiet distortion.
void Update()
{
    switch(weatherState)
    {
        case WeatherStates.Sunny:
            break;
        case WeatherStates.Rain:
            Rain();
            break;
        case WeatherStates.Snow:
            break;
        case WeatherStates.ThunderStorm:
            break;
    }
}

void Rain()
{
    if(timeScript.hourOfWeatherEvent != 0)
    {
        if(timeScript.hourCount >= timeScript.hourOfWeatherEvent)
        {
            rain.SetActive(true);
            if(soundCount < weatherSounds[0].length)
            {
                soundCount++;
                audio.clip = weatherSounds[0];
                audio.Play();
            }
            timeScript.durationOfWeatherEvent -= Time.deltaTime * timeScript.timeSpeed;
            if(timeScript.durationOfWeatherEvent <= 0)
            {
                rain.SetActive(false);
                timeScript.durationOfWeatherEvent = 0;
                timeScript.hourOfWeatherEvent = 0;
                weatherState = WeatherStates.Sunny;
            }
        }
    }
}
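A common way to avoid restarting the clip on every frame, and a simpler guard than the frame counter above, is to check AudioSource.isPlaying before calling Play(). This is only a minimal sketch of that idea, assuming the same audio source and weatherSounds fields as in the script above:

// Minimal sketch (not the original script): start the rain sound once and
// let it keep playing instead of calling Play() every frame from Update().
void Rain()
{
    if (audio.clip != weatherSounds[0])
        audio.clip = weatherSounds[0];

    // Calling Play() on every Update() restarts the clip each frame,
    // which is what produces the quiet, distorted noise.
    if (!audio.isPlaying)
        audio.Play();
}

The rest of the weather bookkeeping from the original Rain() would stay as it is; only the playback guard changes.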

Related

SFML foreground collision

I've been trying to make an RPG game in SFML and I'm now struggling with the collisions. My problem is quite simple: I have two layers, Background and Foreground. Background just acts as a background image, and the Foreground PNG is supposed to provide the collisions, leaving the part where the character is supposed to walk completely free (transparent) and keeping the rest of the structures to collide against (background image here: https://imgur.com/gallery/DA3zGtD ; I'm making the floor transparent while keeping the rest). I've tried using getGlobalBounds().intersects() with the character sprite without any success. I want to keep it as simple as possible; here is what I have so far:
#include <iostream>
#include "Piso1.h"
using namespace std;
Piso1::Piso1(){
};
int Piso1::Draw(RenderWindow &window, Event &evento)
{
srand(time(nullptr));
Soundtrack.openFromFile("../Scenes/Piso1/Sounds/Neon District.wav");
Soundtrack.setLoop(true);
Soundtrack.play();
Texture BGTexture;
BGTexture.loadFromFile("../Scenes/Piso1/Graphics/piso1background.png");
Sprite Background;
Background.setTexture(BGTexture);
Background.setScale(8,7.5);
Background.setPosition(BackX,BackY);
Texture FGTexture;
FGTexture.loadFromFile("../Scenes/Piso1/Graphics/piso1foreground.png");
Sprite Foreground;
Foreground.setTexture(FGTexture);
Foreground.setScale(8,7.5);
Foreground.setPosition(BackX,BackY);
Texture ProtaTextura;
ProtaTextura.loadFromFile("../Scenes/Piso1/Graphics/pSprite.png");
IntRect SpriteBx(0,0,34,47);
Sprite Protagonista(ProtaTextura,SpriteBx);
Protagonista.setPosition((window.getSize().x)/2.35,(window.getSize().y)/3);
Protagonista.setScale(3,3);
while (window.isOpen()) {
while (window.pollEvent(evento)) {
switch (evento.type) {
case Event::Closed:
window.close();
break;
case Event::KeyPressed:
EncounterValue = rand()%1000;
if(EncounterValue > 5){
if(evento.key.code == Keyboard::Down) {
BackY -= 10;
Background.move(0,-10);
Foreground.move(0,-10);
//this is my failed attempt
if(Protagonista.getLocalBounds().intersects(Foreground.getLocalBounds()))
{
Collision.openFromFile("../Scenes/Piso1/Sounds/oof.ogg");
Collision.play();
BackY += 10;
Background.move(0, 10);
Foreground.move(0, 10);
}
if(clock1.getElapsedTime().asMilliseconds()>64){
SpriteBx.top = 0;
if (SpriteBx.left == 0)
SpriteBx.left = 34;
else if (SpriteBx.left==34)
SpriteBx.left= 68;
else if (SpriteBx.left== 68)
SpriteBx.left= 102;
else
SpriteBx.left=0;
Protagonista.setTextureRect(SpriteBx);
clock1.restart();
}
break;
}
else if (evento.key.code == Keyboard::Up) {
BackY += 10;
Background.move(0,10);
Foreground.move(0,10);
if (clock1.getElapsedTime().asMilliseconds()>64)
{
SpriteBx.top = 152;
if (SpriteBx.left == 0)
SpriteBx.left = 34;
else if (SpriteBx.left==34)
SpriteBx.left= 68;
else if (SpriteBx.left== 68)
SpriteBx.left= 102;
else
SpriteBx.left=0;
Protagonista.setTextureRect(SpriteBx);
clock1.restart();
}
break;
}
else if(evento.key.code == Keyboard::Left) {
BackX += 10;
Background.move(10,0);
Foreground.move(10,0);
if (clock1.getElapsedTime().asMilliseconds()>64)
{
SpriteBx.top = 53;
if (SpriteBx.left == 0)
SpriteBx.left = 34;
else if (SpriteBx.left==34)
SpriteBx.left= 68;
else if (SpriteBx.left== 68)
SpriteBx.left= 102;
else
SpriteBx.left=0;
Protagonista.setTextureRect(SpriteBx);
clock1.restart();
}
break;
}
else if(evento.key.code == Keyboard::Right){
BackX -= 10;
Background.move(-10,0);
Foreground.move(-10,0);
if (clock1.getElapsedTime().asMilliseconds()>64)
{
SpriteBx.top = 104;
if (SpriteBx.left == 0)
SpriteBx.left = 34;
else if (SpriteBx.left==34)
SpriteBx.left= 68;
else if (SpriteBx.left== 68)
SpriteBx.left= 102;
else
SpriteBx.left=0;
Protagonista.setTextureRect(SpriteBx);
clock1.restart();
}
break;
}
else if(evento.key.code == Keyboard::C){
Soundtrack.stop();
return 1;
}
}
else{
Soundtrack.stop();
return 0;
}
}
window.clear();
window.draw(Foreground);
window.draw(Background);
window.draw(Protagonista);
window.display();
}
}
}
I think what's happening is that the check uses the whole PNG's bounding box instead of just the parts I want to act as collision. I also wanted to try color collision, but I really don't know how to implement it; I'm kind of new to SFML. Thanks in advance!
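For reference, one way to test only the parts meant to be solid, rather than the whole PNG's bounding box, is to sample the foreground image's pixels and treat any non-transparent pixel as a wall. The following is just a sketch of that idea, not part of the original code; foregroundImage is assumed to come from FGTexture.copyToImage(), and the world position you test (for example a point under the character's feet) is up to you.

#include <SFML/Graphics.hpp>

// Sketch: returns true if the foreground pixel under worldPos is not transparent.
bool isSolid(const sf::Sprite& foreground, const sf::Image& foregroundImage,
             sf::Vector2f worldPos)
{
    // Map the world position back into the foreground texture's pixel space,
    // taking the sprite's position and scale into account.
    sf::Vector2f local = foreground.getInverseTransform().transformPoint(worldPos);

    if (local.x < 0 || local.y < 0 ||
        local.x >= foregroundImage.getSize().x ||
        local.y >= foregroundImage.getSize().y)
        return false; // outside the image: nothing to collide with

    // The walkable floor was made transparent (alpha 0), so anything
    // with alpha > 0 counts as a structure.
    return foregroundImage.getPixel(static_cast<unsigned int>(local.x),
                                    static_cast<unsigned int>(local.y)).a > 0;
}

Checking a pixel like this right before applying a move, and skipping the move when it returns true, keeps the floor walkable while the drawn structures still block the character.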
If you wish to have proper collision detection between the player and other objects, intersects() on its own is not good enough. It will either leave your character permanently stuck in the structure or let it pass through without player control, depending on what behavior you code for this condition.
What does work is detecting from which direction the player is approaching the object, and based on that either moving him back a bit or setting the speed to 0 if the current velocity would take him deeper into the structure.
For example, using intersects() as you are, for the case of the player colliding with a wall from the left:
if(velocity.x > 0 && player.getGlobalBounds().intersects(wall.getGlobalBounds()) && player.getGlobalBounds().left < wall.getGlobalBounds().left)
Then you can use this condition to either stop the player (set velocity.x to 0), reverse the velocity so he backs off, or trigger any other behavior you'd like in the event of a collision.
(Note this is not an ideal solution, but it should work and follows logic similar to what you've already implemented.)
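As a slightly more complete sketch of that idea (again an illustration, not part of the original answer): resolving each axis separately avoids having to work out the approach direction by hand. The walls vector here is an assumption, standing in for whatever rectangles you decide represent the solid parts of the foreground.

#include <SFML/Graphics.hpp>
#include <vector>

// Sketch: move the player one axis at a time and undo the move if it
// overlaps any wall, so X and Y collisions are resolved independently.
void moveWithCollision(sf::Sprite& player,
                       const std::vector<sf::FloatRect>& walls,
                       sf::Vector2f velocity)
{
    auto hitsWall = [&](const sf::FloatRect& bounds)
    {
        for (const sf::FloatRect& wall : walls)
            if (bounds.intersects(wall))
                return true;
        return false;
    };

    player.move(velocity.x, 0.f);           // try the horizontal step
    if (hitsWall(player.getGlobalBounds()))
        player.move(-velocity.x, 0.f);      // blocked: undo it

    player.move(0.f, velocity.y);           // try the vertical step
    if (hitsWall(player.getGlobalBounds()))
        player.move(0.f, -velocity.y);      // blocked: undo it
}

In your setup the same thing can be done by moving Background and Foreground instead of the player, which is exactly what your Keyboard::Down branch already does when it undoes the move after a hit.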

JUCE - play audio input back

I am learning JUCE and I am writing a program that just reads the input from the audio card and plays it back. Obviously this is just for learning purposes. I am using the audio application template. This is the code inside the getNextAudioBlock() function:
void getNextAudioBlock (const AudioSourceChannelInfo& bufferToFill) override
{
    if(true) // this is going to be replaced by checking the value of a button
    {
        const int channel = 0;
        if(true) // this is going to be replaced too
        {
            const float* inBuffer = bufferToFill.buffer->getReadPointer(channel, bufferToFill.startSample);
            float* outBuffer = bufferToFill.buffer->getWritePointer(channel, bufferToFill.startSample);
            for(int sample = 0; sample < bufferToFill.numSamples; ++sample)
                outBuffer[sample] = inBuffer[sample];
        }
        else
        {
            bufferToFill.buffer->clear(0, bufferToFill.startSample, bufferToFill.numSamples);
        }
    }
    else
    {
        bufferToFill.buffer->clear(0, bufferToFill.startSample, bufferToFill.numSamples);
    }
}
The code is really simple: the content from the input buffer is copied directly to the output buffer. However, I am not hearing anything. What am I doing wrong?
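One thing worth double-checking, although it is outside the snippet shown, is whether the component is actually opening any input channels: in the audio application template the channel counts are chosen by the setAudioChannels() call in the component's constructor, and with zero input channels the read pointer will never contain microphone or line-in samples (the operating system also has to grant the app record permission). A minimal sketch, assuming the template's usual MainContentComponent constructor:

MainContentComponent()
{
    // Request 2 input channels and 2 output channels; with (0, 2) the
    // copy in getNextAudioBlock() would only ever read silence.
    setAudioChannels (2, 2);
}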

How to make a flashlight application in Java ME?

I am trying to make a flashlight application, but I can't get it working. This is my code:
public void setFlash(Controllable player) {
    FlashControl flashControl =
        (FlashControl) getControl(player, "javax.microedition.amms.control.camera.FlashControl");
    if (flashControl != null) {
        int[] supportedFlash = flashControl.getSupportedModes();
        if (supportedFlash != null && supportedFlash.length > 0) {
            for (int i = 0; i < supportedFlash.length; i++) {
                if (supportedFlash[i] == DESIRED_FLASH) {
                    try {
                        flashControl.setMode(DESIRED_FLASH);
                    } catch (IllegalArgumentException iae) {
                        // continue
                    }
                    break;
                }
            }
        }
    }
}
According to the JSR 234 documentation, FlashControl has six public mode constants:
AUTO: The camera will autoflash according to the lighting condition
AUTO_WITH_REDEYEREDUCE: The camera will autoflash according to the lighting conditions and if it flashes it will use red-eye reduction
FILLIN: Reduced flash
FORCE: Camera flash is on
FORCE_WITH_REDEYEREDUCE: Camera flash is on and red-eye reduction is in use
OFF: Camera flash is off
You should use FORCE or FORCE_WITH_REDEYEREDUCE, since those are the modes that keep the flash on.
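As a rough sketch of how that can be wired up end to end (the capture locator, the class name and the reduced error handling here are illustrative assumptions, not something the original answer specifies):

import javax.microedition.amms.control.camera.FlashControl;
import javax.microedition.media.Manager;
import javax.microedition.media.Player;

// Sketch: open a camera capture Player, fetch its FlashControl and force the flash on.
public class TorchHelper {
    public void turnFlashOn() {
        try {
            Player player = Manager.createPlayer("capture://video");
            player.realize(); // controls are only available once the player is realized

            FlashControl flash = (FlashControl)
                player.getControl("javax.microedition.amms.control.camera.FlashControl");

            if (flash != null) {
                flash.setMode(FlashControl.FORCE); // keep the flash lit
            }
        } catch (Exception e) {
            // the device may not expose a camera or a FlashControl at all
        }
    }
}

Whether the flash actually stays on outside an active viewfinder varies between handsets, so this needs testing on the target device.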

Processing - record and show video in the Processing sketch

I have a webcam feed in my Processing sketch and I can record and save the video. What I want to accomplish is that when I go to the next case (drawScreenOne), the video I just recorded shows up on the canvas. The problem I have now is that when I save the video with the video export library from com.hamoid, it gets saved in the same folder as my sketch, but to play a movie it needs to be in the data folder. So I can't play the movies without manually moving them to the data folder. Can you do that from within Processing?
And how can I load the videos that I just created in an earlier case? Do I need to use an array for that? I can play the movies when I manually move them to the data folder, but I want Processing to handle that.
This is the code I have so far:
import com.hamoid.*;
import processing.video.*;
import ddf.minim.*;
Minim minim;
AudioInput in;
AudioRecorder recorder;
Movie myMovie;
Movie myMovie1;
int currentScreen;
VideoExport videoExport;
boolean recording = false;
Capture theCap;
Capture cam;
int i = 0;
int countname; //change the name
int name = 000000; //set the number in key's' function
// change the file name
void newFile()
{
countname =( name + 1);
recorder = minim.createRecorder(in, "file/Sound" + countname + ".wav", true);
// println("file/" + countname + ".wav");
}
void setup() {
size(500,500);
frameRate(30);
noStroke();
smooth();
myMovie = new Movie(this, "video0.mp4");
myMovie.loop();
myMovie1 = new Movie(this, "video1.mp4");
myMovie1.loop();
String[] cameras = Capture.list();
if (cameras.length == 0) {
println("There are no cameras available for capture.");
exit();
} else {
println("Available cameras:");
for (int i = 0; i < cameras.length; i++) {
println(cameras[i]);
}
// The camera can be initialized directly using an
// element from the array returned by list():
//cam = new Capture(this, cameras[3]); //built in mac cam "isight"
cam = new Capture(this, 1280, 960, "USB-camera"); //Lex's external camera, left USB port
cam.start();
}
println("Druk op R om geluid en video op te nemen.Druk nog een keer op R om het opnemen te stoppen en druk op S om het op te slaan Druk vervolgens op Z om verder te gaan.");
videoExport = new VideoExport(this, "video" + i + ".mp4");
minim = new Minim(this);
// get a stereo line-in: sample buffer length of 2048
// default sample rate is 44100, default bit depth is 16
in = minim.getLineIn(Minim.STEREO, 2048);
// create a recorder that will record from the input to the filename specified, using buffered recording
// buffered recording means that all captured audio will be written into a sample buffer
// then when save() is called, the contents of the buffer will actually be written to a file
// the file will be located in the sketch's root folder.
newFile();//go to change file name
textFont(createFont("SanSerif", 12));
}
void draw() {
switch(currentScreen){
case 0: drawScreenZero(); break; //camera
case 1: drawScreenOne(); break; //1 video
case 2: drawScreenZero(); break; //camera
case 3: drawScreenTwo(); break; // 2 video's
case 4: drawScreenZero(); break; //camera
case 5: drawScreenThree(); break; //3 video's
case 6: drawScreenZero(); break; //camera
case 7: drawScreenFour(); break; //4 video's
default: background(0); break;
}
}
void mousePressed() {
currentScreen++;
if (currentScreen > 2) { currentScreen = 0; }
}
void drawScreenZero() {
println("drawScreenZero camera");
if (cam.available() == true) {
cam.read();
}
image(cam, 0,0,width, height);
// The following does the same, and is faster when just drawing the image
// without any additional resizing, transformations, or tint.
//set(0, 0, cam);
if (recording) {
videoExport.saveFrame();
}
for(int i = 0; i < in.bufferSize() - 1; i++)
{
line(i, 50 + in.left.get(i)*50, i+1, 50 + in.left.get(i+1)*50);
line(i, 150 + in.right.get(i)*50, i+1, 150 + in.right.get(i+1)*50);
}
if ( recorder.isRecording() )
{
text("Aan het opnemen...", 5, 15);
text("Druk op R als je klaar bent met opnemen en druk op S om het op te slaan.", 5, 30);
}
else
{
text("Gestopt met opnemen. Druk op R om op te nemen, druk op S om op te slaan.", 5, 15);
}
}
void drawScreenOne() {
background(0,255,0);
//fill(0);
//rect(250,40,250,400);
println("drawScreenOne 1 video");
image(myMovie, 0,0, (width/2),(height/2));
}
void drawScreenTwo(){
background(0,0,255);
println("drawScreenTwo 2 videos");
//triangle(150,100,150,400,450,250);
image(myMovie, 0,0, (width/2),(height/2));
image(myMovie1, (width/2),(height/2),(width/2),(height/2));
}
void drawScreenThree(){
//fill(0);
//rect(250,40,250,400);
background(255,0,0);
println("drawScreenThree 3 videos");
image(myMovie, 0,0, (width/2),(height/2));
image(myMovie1, (width/2),(height/2),(width/2),(height/2));
image(myMovie, (width/2),0, (width/2),(height/2));
}
void drawScreenFour(){
//triangle(150,100,150,400,450,250);
background(0,0,255);
println("drawScreenFour 4 videos");
image(myMovie, 0,0, (width/2),(height/2));
image(myMovie1, (width/2),(height/2),(width/2),(height/2));
image(myMovie, (width/2),0, (width/2),(height/2));
image(myMovie1, 0,(height/2),(width/2),(height/2));
}
void keyPressed() {
if (key == 'r' || key == 'R') {
recording = !recording;
println("Recording is " + (recording ? "ON" : "OFF"));
} else if (key == 's' || key == 'S') {
i++;
videoExport = new VideoExport(this, "video" + i + ".mp4");
currentScreen++;
if (currentScreen > 7) { currentScreen = 0; }
}
}
void movieEvent(Movie m) {
m.read();
}
void keyReleased()
{
if ( key == 'r' )
{
// to indicate that you want to start or stop capturing audio data, you must call
// beginRecord() and endRecord() on the AudioRecorder object. You can start and stop
// as many times as you like, the audio data will be appended to the end of the buffer
// (in the case of buffered recording) or to the end of the file (in the case of streamed recording).
if ( recorder.isRecording() )
{
recorder.endRecord();
}
else
{
/*#######################################*/
newFile();
/*#######################################*/
recorder.beginRecord();
}
}
if ( key == 's' )
{
// we've filled the file out buffer,
// now write it to the file we specified in createRecorder
// in the case of buffered recording, if the buffer is large,
// this will appear to freeze the sketch for sometime
// in the case of streamed recording,
// it will not freeze as the data is already in the file and all that is being done
// is closing the file.
// the method returns the recorded audio as an AudioRecording,
// see the example AudioRecorder >> RecordAndPlayback for more about that
name++; //change the file name, +1 every time
recorder.save();
println("Done saving.");
println(name);//check the name
}
}
void stop()
{
// always close Minim audio classes when you are done with them
in.close();
minim.stop();
super.stop();
}
Can you do that from within Processing?
Sure. Just google something like "Java move file" and I'm sure you'll find a ton of results. Or you could just save the video to the data directory in the first place. I've never used the VideoExport class so this is just a guess, but I'd imagine that this would put the video in the data directory:
videoExport = new VideoExport(this, "data/video" + i + ".mp4");
And how can I load the videos that I just created in an earlier case? Do I need to use an array for that?
I'm not sure I understand this question, but you can use any variable you want. Just keep track of where the files are going, and then load them from there.
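As a rough sketch of that bookkeeping (the recordings list, the recordingCount counter and the loadLatestRecording() helper are made up for illustration; they are not part of the original sketch): if the exporter writes into the data folder as suggested above, the same counter can be reused to load each finished clip into a Movie.

// Sketch: to be dropped into the existing sketch, which already imports processing.video.*.
ArrayList<Movie> recordings = new ArrayList<Movie>();
int recordingCount = 0;

void loadLatestRecording() {
  // The exporter was created with "data/video" + recordingCount + ".mp4",
  // and Movie looks in the data folder by default, so the bare name is enough.
  Movie m = new Movie(this, "video" + recordingCount + ".mp4");
  m.loop();              // start looping so image(m, ...) has frames to draw
  recordings.add(m);     // keep every clip so later screens can show several at once
  recordingCount++;
}

Calling loadLatestRecording() from keyPressed() right after the export is finished (recent versions of the VideoExport library have an endMovie() call for that) lets drawScreenOne() and the later screens draw recordings.get(0), recordings.get(1), and so on instead of the fixed myMovie and myMovie1 fields.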

How to play sound onto microphone?

I want to make a soundboard in the Processing language that plays sounds in such a way that the computer handles them as if they were input from my microphone. This is the only problem I have left with the soundboard. How do I make the sounds play as if they were picked up by the microphone?
I have spent an hour searching and trying to get help, but I have nothing to work with.
"Minim provides the class AudioInput for monitoring the user’s current record source (this is often set in the sound card control panel), such as the microphone or the line-in."
(from http://code.compartmental.net/tools/minim/quickstart/)
EDIT:
Have you seen this?
import ddf.minim.*;
import ddf.minim.ugens.*;
Minim minim;
// for recording
AudioInput in;
AudioRecorder recorder;
// for playing back
AudioOutput out;
FilePlayer player;
void setup()
{
size(512, 200, P3D);
minim = new Minim(this);
// get a stereo line-in: sample buffer length of 2048
// default sample rate is 44100, default bit depth is 16
in = minim.getLineIn(Minim.STEREO, 2048);
// create an AudioRecorder that will record from in to the filename specified.
// the file will be located in the sketch's main folder.
recorder = minim.createRecorder(in, "myrecording.wav");
// get an output we can playback the recording on
out = minim.getLineOut( Minim.STEREO );
textFont(createFont("Arial", 12));
}
void draw()
{
background(0);
stroke(255);
// draw the waveforms
// the values returned by left.get() and right.get() will be between -1 and 1,
// so we need to scale them up to see the waveform
for(int i = 0; i < in.left.size()-1; i++)
{
line(i, 50 + in.left.get(i)*50, i+1, 50 + in.left.get(i+1)*50);
line(i, 150 + in.right.get(i)*50, i+1, 150 + in.right.get(i+1)*50);
}
if ( recorder.isRecording() )
{
text("Now recording...", 5, 15);
}
else
{
text("Not recording.", 5, 15);
}
}
void keyReleased()
{
if ( key == 'r' )
{
// to indicate that you want to start or stop capturing audio data,
// you must call beginRecord() and endRecord() on the AudioRecorder object.
// You can start and stop as many times as you like; the audio data will
// be appended to the end of the file.
if ( recorder.isRecording() )
{
recorder.endRecord();
}
else
{
recorder.beginRecord();
}
}
if ( key == 's' )
{
// we've filled the file out buffer,
// now write it to a file of the type we specified in setup
// in the case of buffered recording,
// this will appear to freeze the sketch for sometime, if the buffer is large
// in the case of streamed recording,
// it will not freeze as the data is already in the file and all that is being done
// is closing the file.
// save returns the recorded audio in an AudioRecordingStream,
// which we can then play with a FilePlayer
if ( player != null )
{
player.unpatch( out );
player.close();
}
player = new FilePlayer( recorder.save() );
player.patch( out );
player.play();
}
}
It's from here:
http://code.compartmental.net/minim/audiorecorder_class_audiorecorder.html
