Related
The code below is a test program emulating event processing. The full code snippet is here. There are three threads: one for an event producer, one for an event distributor (an event bus, which also handles system events), and one for a game, which handles significant events.
And I have a stdout printer in the event bus, whose printing method is passed into every event as a callback. The event bus thread:
class EventBus : public std::enable_shared_from_this<EventBus>
{
using handlers = boost::signals2::signal<void(std::shared_ptr<EventBase>)>;
private:
std::atomic<double> m_GameTicks;
std::thread m_Worker;
std::mutex m_Locker;
std::condition_variable m_NewEvent;
bool m_Suspend;
std::queue<std::shared_ptr<EventBase>> m_EventStorage;
std::unordered_map<std::type_index, handlers> m_Subscribers;
std::shared_ptr<IPrinter> m_InfoPrinter;
public:
EventBus()
{
m_Worker = std::thread([this]() /// Firing event
{
while (true)
{
std::shared_ptr<EventBase> event;
{
std::unique_lock guard(m_Locker);
m_NewEvent.wait(
guard,
[this]{ return m_Suspend || !m_EventStorage.empty(); });
if (m_Suspend && m_EventStorage.empty())
{
break;
}
event = std::move(m_EventStorage.front());
m_EventStorage.pop();
}
auto raw_event = event.get();
try
{
m_Subscribers[typeid(*raw_event)](event);
}
catch (std::exception& ex)
{
std::cerr << ex.what() << std::endl;
std::exit(EXIT_FAILURE);
}
}
});
}
~EventBus()
{
m_Worker.join();
}
AddEvent(std::shared_ptr<EventBase>&& event)
{
event->SetPrintingCallback(
[/*ptr = shared_from_this()*/ this](auto&& message)
{
return m_InfoPrinter->SafetyPrint(
m_GameTicks,
std::forward<decltype(message)>(message));
});
{
std::lock_guard guard(m_Locker);
m_EventStorage.push(std::move(event));
}
m_NewEvent.notify_one();
}
template <typename TEvent, typename TSubscriber> // TODO add enable_if
void Subscribe(const std::shared_ptr<TSubscriber>& subscriber)
{
m_Subscribers[typeid(TEvent)].connect(
[subscriber](std::shared_ptr<EventBase> event)
{
subscriber->ProcessEvent(std::static_pointer_cast<TEvent>(event));
});
}
template <typename TEvent, typename TFunc>
void Subscribe(const TFunc& subscriber_callback)
{
m_Subscribers[typeid(TEvent)].connect(
[subscriber_callback](std::shared_ptr<EventBase> event)
{
subscriber_callback(std::static_pointer_cast<TEvent>(event));
});
}
template <typename TEvent>
void SubscribeSelf()
{
m_Subscribers[typeid(TEvent)].connect(
[this](std::shared_ptr<EventBase> event)
{
ProcessEvent(std::static_pointer_cast<TEvent>(event));
});
}
};
Sometimes there is a SIGSEGV with this variant of AddEvent; but if I pass shared_from_this instead, the callback is either never called, or only one or two events get processed. I don't understand why this happens — in either the first or the second variant.
I try to debug and I see that boost.signal2 disconnects handlers at the second variant with shared_from_this passing.
The game thread:
/// Runs the game's asio event loop on a dedicated thread; a work guard
/// keeps the io_context alive until destruction.
/// NOTE(review): member declarations (m_MapSize, m_GameContext, m_GameThread,
/// m_Work, dummy_game_work_type) are not visible in this snippet — confirm
/// their types against the full source.
class GameMap : public std::enable_shared_from_this<GameMap>
{
public:
// map_size: dimensions of the map. The unnamed Key<GameMapFactory> argument
// is a passkey, restricting construction to GameMapFactory.
GameMap(const MapPoint& map_size, const Key<GameMapFactory>&)
: m_MapSize(map_size)
, m_Work(std::make_unique<dummy_game_work_type>(m_GameContext.get_executor()))
{
// Run the asio loop on its own thread; the work object above keeps
// run() from returning while the queue is empty.
m_GameThread = std::thread([this] ()
{
m_GameContext.run();
});
}
~GameMap()
{
// Dropping the work guard lets m_GameContext.run() return once any
// queued handlers finish, allowing join() to complete.
m_Work.reset(); // need transfer this to processing finish event?
m_GameThread.join();
}
};
GameMap handles events in its ProcessEvent method, does some validation, and posts its handlers via boost::asio::post for further processing.
The main thread:
/// Test driver: wires the bus's subscriptions, then feeds a scripted event
/// sequence through AddEvent.
int main(int ac, char **av)
{
std::shared_ptr<GameMap> map;
// FIX: "std::make_shared<EventBus (" was missing the closing '>' of the
// template argument list and did not compile.
std::shared_ptr<EventBus> bus = std::make_shared<EventBus>(std::make_shared<StdOutPrinter>());
// Map creation also chains the MarchEvent subscription onto the new map.
bus->Subscribe<MapCreationEvent>(
[&map, &bus](auto&& event) // todo check about references in capture list
{
map = GameMapFactory{}.CreateMap(std::forward<decltype(event)>(event));
bus->Subscribe<MarchEvent>(map);
// todo think about replace this to Game Map Factory
});
bus->Subscribe<SpawnCreatureEvent>(
[](auto&& event)
{
CreatureFactory{}.CreateCreature(std::forward<decltype(event)>(event));
});
// bus->Subscribe<WaitEvent>(bus); // todo it doesn't work too
// bus->Subscribe<WaitEvent>( // todo why it doesn't work subscription to self slot??
// [bus](auto&& event)
// {
// bus->ProcessEvent(std::forward<decltype(event)>(event));
// });
bus->SubscribeSelf<WaitEvent>();
bus->SubscribeSelf<FinishEvent>();
// todo test producer
std::vector<std::shared_ptr<EventBase>> events = {
std::make_shared<MapCreationEvent>(MapPoint(40, 40)),
std::make_shared<SpawnCreatureEvent>(1, MapPoint(40, 40), 50),
std::make_shared<SpawnCreatureEvent>(2, MapPoint(30, 40), 100),
std::make_shared<SpawnCreatureEvent>(3, MapPoint(10, 20), 70),
std::make_shared<SpawnCreatureEvent>(4, MapPoint(40, 30), 90),
std::make_shared<MarchEvent>(1, MapPoint(40, 30)),
std::make_shared<MarchEvent>(2, MapPoint(20, 20)),
std::make_shared<WaitEvent>(1),
std::make_shared<WaitEvent>(1),
std::make_shared<WaitEvent>(1),
std::make_shared<WaitEvent>(1),
std::make_shared<WaitEvent>(1),
std::make_shared<WaitEvent>(1),
std::make_shared<WaitEvent>(1),
std::make_shared<WaitEvent>(1),
std::make_shared<WaitEvent>(1),
std::make_shared<WaitEvent>(1),
std::make_shared<MarchEvent>(4, MapPoint(20, 20)),
std::make_shared<FinishEvent>(),
};
for (auto& ev : events)
{
bus->AddEvent(std::move(ev));
}
}
I ran into a similar problem here. If I subscribe the bus to handle Wait and Finish events via Subscribe, I get the same behaviour I described above: signals2 disconnects all my slots and the program stops. Why?
Could somebody help me with the disconnecting and the SIGSEGV?
I tried to change passing this to shared_from_this, but I only got a different error.
I have a webcam feed in my processing sketch and i can record and save the video. What i wanna accomplish is that when i go to the next case (drawScreenOne) that the video i just recorded will show up on the canvas. The problem that i have now, is that when i save the video, with the video export library from com.hamoid, it gets saved in the same folder as my sketch, but to play a movie it needs to be in the data folder. So i can't play the movies without it manually moving to the data folder. Can you do that from within processing?
And how can i load up the videos that i just created in a case before? Do i need to use an array for that? I can play the movies when i manually move it to the data folder but i want processing to handle that.
this is the code i have so far:
import com.hamoid.*;
import processing.video.*;
import ddf.minim.*;
Minim minim;               // audio library entry point
AudioInput in;             // live line-in used for waveform display + recording
AudioRecorder recorder;    // writes the line-in to a .wav file
Movie myMovie;             // first pre-recorded clip (must live in data/)
Movie myMovie1;            // second pre-recorded clip
int currentScreen;         // index into the draw() screen switch (0..7)
VideoExport videoExport;   // com.hamoid exporter for webcam frames
boolean recording = false; // true while frames are fed to videoExport
Capture theCap;            // NOTE(review): declared but never used in this snippet
Capture cam;               // active webcam feed
int i = 0;                 // numeric suffix for the next exported video file
int countname; //change the name  (next audio file number, set in newFile())
int name = 000000; //set the number in key's' function
// change the file name
void newFile()
{
countname =( name + 1);
recorder = minim.createRecorder(in, "file/Sound" + countname + ".wav", true);
// println("file/" + countname + ".wav");
}
// One-time initialization: canvas, pre-recorded movies, webcam, video
// exporter, and the Minim audio input/recorder.
void setup() {
size(500,500);
frameRate(30);
noStroke();
smooth();
// Movies are loaded from the data/ folder; these files must already exist.
myMovie = new Movie(this, "video0.mp4");
myMovie.loop();
myMovie1 = new Movie(this, "video1.mp4");
myMovie1.loop();
String[] cameras = Capture.list();
if (cameras.length == 0) {
println("There are no cameras available for capture.");
exit();
} else {
println("Available cameras:");
for (int i = 0; i < cameras.length; i++) {
println(cameras[i]);
}
// The camera can be initialized directly using an
// element from the array returned by list():
//cam = new Capture(this, cameras[3]); //built in mac cam "isight"
cam = new Capture(this, 1280, 960, "USB-camera"); // external camera (Lex), left USB port
cam.start();
}
println("Druk op R om geluid en video op te nemen.Druk nog een keer op R om het opnemen te stoppen en druk op S om het op te slaan Druk vervolgens op Z om verder te gaan.");
// NOTE(review): this writes into the sketch folder; Movie can only load from
// data/ — passing "data/video..." here would avoid the manual move.
videoExport = new VideoExport(this, "video" + i + ".mp4");
minim = new Minim(this);
// get a stereo line-in: sample buffer length of 2048
// default sample rate is 44100, default bit depth is 16
in = minim.getLineIn(Minim.STEREO, 2048);
// create a recorder that will record from the input to the filename specified, using buffered recording
// buffered recording means that all captured audio will be written into a sample buffer
// then when save() is called, the contents of the buffer will actually be written to a file
// the file will be located in the sketch's root folder.
newFile();//go to change file name
textFont(createFont("SanSerif", 12));
}
// Render whichever screen the state machine is on: even indices show the
// live camera, odd indices show one to four recorded videos.
void draw() {
  if (currentScreen == 0 || currentScreen == 2 || currentScreen == 4 || currentScreen == 6) {
    drawScreenZero();   // camera
  } else if (currentScreen == 1) {
    drawScreenOne();    // 1 video
  } else if (currentScreen == 3) {
    drawScreenTwo();    // 2 videos
  } else if (currentScreen == 5) {
    drawScreenThree();  // 3 videos
  } else if (currentScreen == 7) {
    drawScreenFour();   // 4 videos
  } else {
    background(0);
  }
}
// Advance to the next screen on click, wrapping back to the camera screen.
// NOTE(review): this wraps after screen 2 while keyPressed() wraps after 7 —
// confirm which limit is intended.
void mousePressed() {
  currentScreen = (currentScreen + 1 > 2) ? 0 : currentScreen + 1;
}
// Live camera view: draws the current webcam frame, feeds it to the video
// exporter while recording, then overlays the audio waveform and status text.
void drawScreenZero() {
  println("drawScreenZero camera");
  if (cam.available()) {
    cam.read();
  }
  image(cam, 0, 0, width, height);
  // Push this frame into the export while recording is toggled on.
  if (recording) {
    videoExport.saveFrame();
  }
  // Left/right channel waveforms of the live audio input.
  for (int n = 0; n < in.bufferSize() - 1; n++) {
    line(n, 50 + in.left.get(n) * 50, n + 1, 50 + in.left.get(n + 1) * 50);
    line(n, 150 + in.right.get(n) * 50, n + 1, 150 + in.right.get(n + 1) * 50);
  }
  // Status overlay (Dutch UI strings preserved verbatim).
  if (recorder.isRecording()) {
    text("Aan het opnemen...", 5, 15);
    text("Druk op R als je klaar bent met opnemen en druk op S om het op te slaan.", 5, 30);
  } else {
    text("Gestopt met opnemen. Druk op R om op te nemen, druk op S om op te slaan.", 5, 15);
  }
}
// Screen 1: green background with the first clip in the top-left quadrant.
void drawScreenOne() {
  background(0, 255, 0);
  println("drawScreenOne 1 video");
  image(myMovie, 0, 0, width / 2, height / 2);
}
// Screen 3: blue background, clip 0 top-left and clip 1 bottom-right.
void drawScreenTwo() {
  background(0, 0, 255);
  println("drawScreenTwo 2 videos");
  image(myMovie, 0, 0, width / 2, height / 2);
  image(myMovie1, width / 2, height / 2, width / 2, height / 2);
}
// Screen 5: red background. Only two Movie objects exist, so myMovie is
// shown twice (top-left and top-right) alongside myMovie1 (bottom-right).
void drawScreenThree() {
  background(255, 0, 0);
  println("drawScreenThree 3 videos");
  image(myMovie, 0, 0, width / 2, height / 2);
  image(myMovie1, width / 2, height / 2, width / 2, height / 2);
  image(myMovie, width / 2, 0, width / 2, height / 2);
}
// Screen 7: blue background with the two clips tiled into all four quadrants.
void drawScreenFour() {
  background(0, 0, 255);
  println("drawScreenFour 4 videos");
  image(myMovie, 0, 0, width / 2, height / 2);
  image(myMovie1, width / 2, height / 2, width / 2, height / 2);
  image(myMovie, width / 2, 0, width / 2, height / 2);
  image(myMovie1, 0, height / 2, width / 2, height / 2);
}
// 'r'/'R' toggles feeding frames to the video exporter; 's'/'S' starts a new
// export file and advances to the next screen.
void keyPressed() {
  if (key == 'r' || key == 'R') {
    recording = !recording;
    println("Recording is " + (recording ? "ON" : "OFF"));
  } else if (key == 's' || key == 'S') { // FIX: was `key == 's' || key == 's'`,
                                         // so an uppercase 'S' was ignored
    i++;
    videoExport = new VideoExport(this, "video" + i + ".mp4");
    currentScreen++;
    if (currentScreen > 7) { currentScreen = 0; }
  }
}
// Video-library callback fired when a movie has a new frame available;
// read it so the next image() call draws the latest frame.
void movieEvent(Movie m) {
m.read();
}
void keyReleased()
{
  // 'r' toggles audio capture on the AudioRecorder. beginRecord()/endRecord()
  // may be called repeatedly; captured audio is appended to the buffer (or,
  // for streamed recording, to the end of the file).
  if ( key == 'r' )
  {
    if ( !recorder.isRecording() )
    {
      newFile();              // retarget the recorder at the next numbered file
      recorder.beginRecord();
    }
    else
    {
      recorder.endRecord();
    }
  }
  // 's' flushes the buffered audio into the file chosen in newFile(). With a
  // large buffer this can briefly freeze the sketch; streamed recording only
  // closes the file. save() also returns the audio as an AudioRecording (see
  // the AudioRecorder >> RecordAndPlayback example).
  if ( key == 's' )
  {
    name++;                   // bump the counter used for the next file name
    recorder.save();
    println("Done saving.");
    println(name);            // show which take was just saved
  }
}
// Processing shutdown hook: release Minim's audio resources before exit.
void stop()
{
// always close Minim audio classes when you are done with them
in.close();
minim.stop();
super.stop();
}
Can you do that from within processing?
Sure. Just google something like "Java move file" and I'm sure you'll find a ton of results. Or you could just save the video to the data directory in the first place. I've never used the VideoExport class so this is just a guess, but I'd imagine that this would put the video in the data directory:
videoExport = new VideoExport(this, "data/video" + i + ".mp4");
And how can i load up the videos that i just created in a case before? Do i need to use an array for that?
I'm not sure I understand this question, but you can use any variable you want. Just keep track of where the files are going, and then load them from there.
I want to make a soundboard in the Processing language that plays sounds so the computer handles the sounds as if they were inputs from my microphone. This is my only problem about doing a soundboard. How do I make the sounds play as if they were recorded by the microphone?
I have spent an hour searching and trying to get help, but I have nothing to work with.
Minim provides the class AudioInput for monitoring the user’s current record source (this is often set in the sound card control panel), such as the microphone or the line-in
from
http://code.compartmental.net/tools/minim/quickstart/
EDIT:
Have you seen this?
import ddf.minim.*;
import ddf.minim.ugens.*;
Minim minim;            // audio library entry point
// for recording
AudioInput in;          // live line-in source
AudioRecorder recorder; // captures `in` into myrecording.wav
// for playing back
AudioOutput out;        // line out used to play the saved recording
FilePlayer player;      // plays the file returned by recorder.save()
// Audio setup: open a stereo line-in for monitoring/recording and a stereo
// line-out so saved recordings can be played back.
void setup()
{
  size(512, 200, P3D);
  minim = new Minim(this);
  // Stereo line-in with a 2048-sample buffer (defaults: 44100 Hz, 16-bit).
  in = minim.getLineIn(Minim.STEREO, 2048);
  // The recorder writes into the sketch's main folder.
  recorder = minim.createRecorder(in, "myrecording.wav");
  // Output used later by the FilePlayer.
  out = minim.getLineOut( Minim.STEREO );
  textFont(createFont("Arial", 12));
}
// Draw the live input waveform plus a recording-status line.
void draw()
{
  background(0);
  stroke(255);
  // Samples range -1..1, so scale them by 50 to make the waveform visible.
  int last = in.left.size() - 1;
  for (int s = 0; s < last; s++)
  {
    line(s, 50 + in.left.get(s) * 50, s + 1, 50 + in.left.get(s + 1) * 50);
    line(s, 150 + in.right.get(s) * 50, s + 1, 150 + in.right.get(s + 1) * 50);
  }
  text(recorder.isRecording() ? "Now recording..." : "Not recording.", 5, 15);
}
void keyReleased()
{
  // 'r' toggles capture; audio is appended across multiple record sessions.
  if ( key == 'r' )
  {
    if ( !recorder.isRecording() )
    {
      recorder.beginRecord();
    }
    else
    {
      recorder.endRecord();
    }
  }
  // 's' writes the capture to disk and immediately plays it back. With
  // buffered recording a large buffer can briefly freeze the sketch;
  // streamed recording merely closes the file. save() returns an
  // AudioRecordingStream we can hand to a FilePlayer.
  if ( key == 's' )
  {
    // Tear down the player from any previous save before replacing it.
    if ( player != null )
    {
      player.unpatch( out );
      player.close();
    }
    player = new FilePlayer( recorder.save() );
    player.patch( out );
    player.play();
  }
}
It's from here:
http://code.compartmental.net/minim/audiorecorder_class_audiorecorder.html
I am trying to use a Boost condition variable in my application to synchronize two different threads, as follows:
The main thread, will create a TCP server and instance of object called MIH-User and register a callback to an event_handler.
Main.cpp
/**
 * Default MIH event handler.
 *
 * @param msg Received message.
 * @param ec Error code.
 */
// FIX: the doxygen tags above used '#param' instead of '@param'.
void event_handler(odtone::mih::message &msg, const boost::system::error_code &ec)
{
if (ec)
{
log_(0, __FUNCTION__, " error: ", ec.message());
return;
}
switch (msg.mid())
{
// Source Server received HO Complete Message
case odtone::mih::indication::n2n_ho_complete:
{
// FIX: removed a duplicate `if (ec)` check that sat here — `ec` was already
// tested (with an early return) at the top of the handler, so it was dead code.
mih::id mobile_id; // Mobile node MIHF ID TLV
mih::link_tuple_id source_id; // Source Link ID TLV
mih::link_tuple_id target_id; // Target Link ID TLV
mih::ho_result ho_res; // Handover Result TLV
// Deserialize received MIH message "N2N Handover Complete Indication"
msg >> mih::indication()
& mih::tlv_mobile_node_mihf_id(mobile_id)
& mih::tlv_link_identifier(source_id)
& mih::tlv_new_link_identifier(target_id)
& mih::tlv_ho_result(ho_res);
log_(0, "has received a N2N_HO_Complete.Indication with HO-Result=", ho_res.get(),
" from ", msg.source().to_string(), ", for Mobile-IP=", mobile_id.to_string());
// Find the source transaction which corresponds to this Indication
src_transaction_ptr t;
tpool->find(msg.source(), mobile_id.to_string(), t);
{
// Publish the result under the transaction's mutex so the waiting
// state machine observes a consistent update.
boost::lock_guard<boost::mutex> lock(t->mut);
t->response_received = true;
t->ho_complete_result = ho_res;
t->tid = msg.tid();
}
// Notify after releasing the lock: the woken thread can then acquire it
// without immediately blocking.
t->cond.notify_one();
}
break;
}
}
int main(int argc, char **argv)
{
odtone::setup_crash_handler();
boost::asio::io_service ios;
// MIH user SAP whose incoming messages are routed to event_handler.
sap::user usr(cfg, ios, boost::bind(&event_handler, _1, _2));
mMihf = &usr;
// Register the MIH-Usr with the local MIHF
register_mih_user(cfg);
// Pool of pending transactions with peer mihfs
ho_transaction_pool pool(ios);
tpool = &pool;
// The io_service object provides I/O services, such as sockets,
// that the server object will use.
tcp_server server(ios, cfg.get<ushort>(kConf_Server_Port));
// NOTE(review): ios.run() is never called in this snippet — presumably the
// server or SAP runs the loop internally; if not, no async handler
// (including event_handler) would ever execute. Confirm against the full
// program.
}
The TCP server listens for new incoming connections; upon receiving a new connection it creates a new thread corresponding to a source transaction machine, and also adds it to a common transaction pool, as follows:
TCP Server
void handle_request(std::string arg1,std::string arg2)
{
src_transaction_ptr t(new src_transaction(arg1, arg2));
tpool->add(t);
t->run();
}
// Completion handler for an async read on the control socket. Messages are
// ':'-separated: "INIT:<mihf-id>:<ip>" starts one source transaction;
// "TEST:<mihf-id>:<count>" starts <count> of them. The buffer is then
// cleared and the next async read is armed.
void handle_read(const boost::system::error_code &error, size_t bytes_transferred)
{
if (!error)
{
// Split received message defining ";" as a delimiter
// NOTE(review): the comment above says ";" but the code splits on ":".
std::vector<std::string> strs;
boost::split(strs, mMessage, boost::is_any_of(":"));
log_(0, "Received Message from TCP Client: ", mMessage);
// The first value is the HO Command Initiation message
if ((strs.at(0).compare("INIT") == 0) && (strs.size() == 3))
{
// The second value is the MIHF ID and the third is the IP address
// Start Source transaction if we receive "Init-Message"
// NOTE(review): the boost::thread temporary detaches when it is destroyed
// at end of statement, and it captures a raw `this` — confirm this
// connection object (kept alive via shared_from_this for the read chain)
// outlives those worker threads.
boost::thread thrd(&tcp_connection::handle_request, this, strs.at(1), strs.at(2));
}
else if ((strs.at(0).compare("TEST") == 0) && (strs.size() == 3))
{
int max_iterations = atoi(strs.at(2).c_str());
for (int i = 1; i <= max_iterations; i++)
{
boost::thread thrd(&tcp_connection::handle_request,
this, strs.at(1), boost::lexical_cast<std::string>(i));
}
}
else
log_(0, "Error: Unrecognized message.");
// Zero the buffer so stale bytes don't leak into the next message.
memset(&mMessage[0], 0, max_length);
mSocket.async_read_some(boost::asio::buffer(mMessage, max_length),
boost::bind(&tcp_connection::handle_read, shared_from_this(),
boost::asio::placeholders::error,
boost::asio::placeholders::bytes_transferred));
}
}
The source transaction machine moves between different states, and in one of them it has to freeze execution until it receives an indication through the main thread (the "n2n_ho_complete"); at that point the handler sets response_received to true, as follows:
Source Transaction Machine
/**
 * Run Source State Machine transaction.
 *
 * Goto-label based state machine; the earlier states are elided in this
 * snippet. The wait state below is a standard condition-variable consumer:
 * mut, cond and response_received are per-transaction members, and
 * event_handler() (running on another thread) sets response_received and
 * notifies cond after filling in the result fields.
 */
void src_transaction::run()
{
// Previous states (elided).
wait_ho_complete_indication_state:
{
log_(1, "is in SRC_WAIT_HO_COMPLETE_INDICATION State for Mobile IP=", ip_address);
mState = SRC_WAIT_HO_COMPLETE_INDICATION;
boost::unique_lock<boost::mutex> lock(mut);
// The while loop guards against spurious wakeups; cond.wait releases the
// lock while blocked and reacquires it before the predicate is re-tested.
while (!response_received)
{
cond.wait(lock);
}
// Reset the flag so the next wait in a later state blocks again.
response_received = false;
// Do some stuff
}
// Other states
return;
}
The response_received is a public variable and each instance of the class has its own variable. When an indication is received through the main thread, it will look for the source transaction that matches that indication and sets its response_received to true.
So my problem is: whenever I try to execute the code, the whole program hangs on the wait_ho_complete_indication_state ,and the program doesn't respond to anything.
And for example if I request the creation of a 10 threads for a source transaction. The program will create all of them and they start to work concurrently, until one of them reaches the wait_ho_complete_indication_state , then everything freezes. Even the main thread doesn't respond at all, even if it received an indication throught the event_handler.
So is my code correct for using the conditional variable?
Please help with this issue.
Thanks a lot.
I am trying to create an app that lets the user record some sounds and then play them back.
I would like to have my application play a .wav file that the user will record.
I am having trouble figuring out how to code this, as I keep getting a error.
==== JavaSound Minim Error ====
==== Error invoking createInput on the file loader object: null
Snippet of code:
import ddf.minim.*;
AudioInput in;          // live input to record from
AudioRecorder recorder; // writes captured audio to disk
RadioButtons r;         // GUI control; 0 = record view, 1 = play view (per draw())
boolean showGUI = false;
color bgCol = color(0);
Minim minim;            // audio library entry point
//Recording players
AudioPlayer player1;    // playback of recording #1
AudioPlayer player2;    // playback of recording #2
// Retarget the recorder at the next numbered take, data/<countname>.wav,
// with buffered recording enabled.
void newFile()
{
  countname = name + 1;
  recorder = minim.createRecorder(in, "data/" + countname + ".wav", true);
}
......
// NOTE(review): this fragment is not compilable as posted — setup() is never
// closed before draw() begins, and the braces inside draw() around the
// r.get() blocks are unbalanced. The comments below annotate intent only.
void setup(){
minim = new Minim(this);
in = minim.getLineIn(Minim.MONO, 2048);
newFile();
// NOTE(review): these load the file newFile() just pointed the recorder at,
// before anything has been recorded or saved — which is why Minim reports
// the createInput error (see the answer below).
player1 = minim.loadFile("data/" + countname + ".wav");// recording #1
player2 = minim.loadFile("data/" + countname + ".wav");//recording #2
void draw() {
// Draw the image to the screen at coordinate (0,0)
image(img,0,0);
//recording button
if(r.get() == 0)
{
for(int i = 0; i < in.left.size()-1; i++)
}
if ( recorder.isRecording() )
{
text("Currently recording...", 5, 15);
}
else
{
text("Not recording.", 5, 15);
}
}
//play button
if(r.get() == 1)
{
if(mousePressed){
.......
player_1.cue(0);
player_1.play();
}
if(mousePressed){
.......
// NOTE(review): player_1/player_2 (with underscore) don't match the
// player1/player2 declared above.
player_2.cue(0);
player_2.play();
}
}
The place where I have a problem is here:
player1 = minim.loadFile("data/" + countname + ".wav");// recording #1
player2 = minim.loadFile("data/" + countname + ".wav");//recording #2
The files that will be recorded will be 1.wav, 2.wav. But I can not place this in the
player1.minim.loadFile ("1.wav");
player2.mminim.loadFile("2.wav");
How would I do this?
As indicated in the JavaDoc page for AudioRecorder [1], calls to beginRecord(), endRecord() and save() will need to happen so that whatever you want to record is actually recorded and then also saved to disk. As long as that does not happen there is nothing for loadFile() to load and you will therefore receive errors. So the problem lies in your program flow. Only when your program reaches a state where a file has already been recorded and saved, you can actually load that.
There are probably also ways for you to play back whatever is being recorded right at the moment it arrives in your audio input buffer (one would usually refer to such as 'monitoring'), but as i understand it, that is not what you want.
Aside this general conceptual flaw there also seem to be other problems in your code, e.g. countname is not being iterated between two subsequent loadFile calls (I assume that it should be iterated though); Also at some point you have "player_1.play();" (note the underscore), although you're probably refering to this, differently written variable earlier initialized with "player1 = minim.loadFile(...)" ? ...
[1] http://code.compartmental.net/minim/javadoc/ddf/minim/AudioRecorder.html
This is the approach to record from an audio file into an AudioRecorder object. You load a file, play it and then you choose what section to save into another file that you can play using and AudioPlayer object or your favorite sound player offered by your OS.
Related to
I am having trouble figuring out how to code this, as I keep getting a
error.
Although it says it is an error, it doesn't affect the execution of your program. I would consider it a warning and ignore it. If you want to fix it, I believe you will need to edit the file's tags to set their values properly.
INSTRUCTIONS: In the code, define your file to play. When you run the sketch, press r to begin recording, r again to stop recording. Don't forget to press s to save the file to an audio file which will be located in the data folder.
NOTE: If you need to play wav files, you will need a Sampler object instead of a FilePlayer one.
//REFERENCE: https:// forum.processing.org/one/topic/how-can-i-detect-sound-with-my-mic-in-my-computer.html
//REFERENCE: https:// forum.processing.org/two/discussion/21842/is-it-possible-to-perform-fft-with-fileplayer-object-minim
/**
* This sketch demonstrates how to use an <code>AudioRecorder</code> to record audio to disk.
* Press 'r' to toggle recording on and off and the press 's' to save to disk.
* The recorded file will be placed in the sketch folder of the sketch.
* <p>
* For more information about Minim and additional features,
* visit http://code.compartmental.net/minim/" target="_blank" rel="nofollow">http://code.compartmental.net/minim/</a>;
*/
import ddf.minim.*;
import ddf.minim.ugens.*;
import ddf.minim.analysis.*;
Minim minim;            // audio library entry point
FilePlayer player;      // streams the source MP3 from disk
AudioOutput out;        // line out that both plays audio and feeds the recorder
AudioRecorder recorder; // records whatever reaches `out`
// Load an MP3, route it through a rate-control ugen into the line out, and
// attach a recorder to that output so the playback can be captured to disk.
void setup()
{
  size(512, 200, P3D);
  textFont(createFont("Arial", 12));
  minim = new Minim(this);
  // Stream the source file; FilePlayer does not work for WAV input here —
  // use a Sampler object for WAV files instead.
  player = new FilePlayer(minim.loadFileStream("energeticDJ.mp3"));
  out = minim.getLineOut();
  // TickRate sits between player and output; 1.f means normal speed.
  TickRate rateControl = new TickRate(1.f);
  player.patch(rateControl).patch(out);
  // Buffered recorder targeting data/myrecording.wav.
  recorder = minim.createRecorder(out, dataPath("myrecording.wav"),true);
  player.loop(0);
}
// Draw a playhead over a black background plus the recording status.
void draw()
{
  background(0);
  stroke(255);
  // Map the player position into screen x to show where playback is.
  float posx = map(player.position(), 0, player.length(), 0, width);
  stroke(0, 200, 0);
  line(posx, 0, posx, height);
  text(recorder.isRecording() ? "Currently recording..." : "Not recording.", 5, 15);
}
void keyReleased()
{
if ( key == 'r' )
{
// to indicate that you want to start or stop capturing audio data, you must call
// beginRecord() and endRecord() on the AudioRecorder object. You can start and stop
// as many times as you like, the audio data will be appended to the end of the buffer
// (in the case of buffered recording) or to the end of the file (in the case of streamed recording).
if ( recorder.isRecording() )
{
recorder.endRecord();
} else
{
recorder.beginRecord();
}
}
if ( key == 's' )
{
// we've filled the file out buffer,
// now write it to the file we specified in createRecorder
// in the case of buffered recording, if the buffer is large,
// this will appear to freeze the sketch for sometime
// in the case of streamed recording,
// it will not freeze as the data is already in the file and all that is being done
// is closing the file.
// the method returns the recorded audio as an AudioRecording,
// see the example AudioRecorder >> RecordAndPlayback for more about that
recorder.save();
println("Done saving.");
}
}