Recording with BluetoothHeadset Mic - android-studio

Hi there,
I'm trying to make an app with Android Studio that can record sound using a Bluetooth headset.
I know there are a lot of posts close to this one, but I tried all the answers and it won't work for me.
My code gives me back a filled byte buffer; however, testing shows it is always the phone's mic, never the headset mic.
If anyone could take a look at my code and point out why it won't use the Bluetooth headset, that would be a huge help.
public class Inhalation extends AppCompatActivity {
AudioManager audioManager;
AudioRecord audioRecord=null;
Button mrecord;
Button mpause;
boolean isRecording=false;
private Thread recordingThread = null;
private int bufferSize = AudioRecord.getMinBufferSize(8000,AudioFormat.CHANNEL_IN_MONO,AudioFormat.ENCODING_PCM_16BIT);
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_inhalation);
mrecord= findViewById(R.id.Button_Record_ID);
mpause=findViewById(R.id.Button_Pause_ID);
audioManager =(AudioManager) this.getSystemService(this.AUDIO_SERVICE);
}
//is supposed to start recording using the BT MIC. Can only be called if BTSCO is connected
private void startRecording() {
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioRecord.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
//picks up the recorded audiobuffer and writes it into a file
private void writeAudioDataToFile() {
String filename="record";
byte saudioBuffer[] = new byte[bufferSize];
FileOutputStream os = null;
// TODO (4) Audiorecord Filecreation
try {
os = openFileOutput(filename, Context.MODE_PRIVATE);
} catch (FileNotFoundException e) {
e.printStackTrace();
Log.d("headset_rec","false filepath");
}
while (isRecording) {
audioRecord.read(saudioBuffer, 0, bufferSize);
try {
os.write(saudioBuffer, 0, bufferSize);
// os.write(saudioBuffer);
Log.d("headset_rec","writing"+saudioBuffer[0]);
} catch (IOException e) {
e.printStackTrace();
Log.d("headset_rec","writefail");
}
}
try {
os.close();
} catch (IOException e) {
Log.d("headset_rec","close");
e.printStackTrace();
}
}
//stops the recording
private void stopRecording() {
// stops the recording activity
if (null != audioRecord) {
isRecording = false;
audioRecord.stop();
audioRecord.release();
audioRecord = null;
recordingThread = null;
}
}
public void Record_On_Click(View view){
mpause.setEnabled(true);
mrecord.setEnabled(false);
requestRecordAudioPermission();
startRecording();
}
//Button to pause
public void Record_Pause_Click(View view){
stopRecording();
// readFromFile();
mrecord.setEnabled(true);
mpause.setEnabled(false);
}
//if BluetoothSCO is connected enables recording
private BroadcastReceiver mBluetoothScoReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
int state = intent.getIntExtra(AudioManager.EXTRA_SCO_AUDIO_STATE, -1);
System.out.println("ANDROID Audio SCO state: " + state);
if (AudioManager.SCO_AUDIO_STATE_CONNECTED == state) {
Log.d("SCOO","connected");
mrecord.setEnabled(true);
}
if(AudioManager.SCO_AUDIO_STATE_DISCONNECTED==state){
Log.d("SCOO","disconnected");
mrecord.setEnabled(false);
}
}
};
//connects to the bluetoothHeadset doing the following:
@Override
protected void onResume() {
// TODO (5) Bluetooth Mik
// Start Bluetooth SCO.
if(isRecording){
mpause.setEnabled(true);
mrecord.setEnabled(false);
}
IntentFilter intentFilter = new IntentFilter(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
registerReceiver(mBluetoothScoReceiver, intentFilter);
audioManager.setMode(audioManager.MODE_NORMAL);
audioManager.setBluetoothScoOn(true);
audioManager.startBluetoothSco();
// Stop Speaker.
audioManager.setSpeakerphoneOn(false);
super.onResume();
}
//Disconnects from the Bluetoothheadset doing the following
@Override
protected void onDestroy() {
audioManager.stopBluetoothSco();
audioManager.setMode(audioManager.MODE_NORMAL);
audioManager.setBluetoothScoOn(false);
// Start Speaker.
audioManager.setSpeakerphoneOn(true);
unregisterReceiver(mBluetoothScoReceiver);
super.onDestroy();
}
private void requestRecordAudioPermission() {//gets the permission to record audio
//check API version, do nothing if API version < 23!
int currentapiVersion = android.os.Build.VERSION.SDK_INT;
if (currentapiVersion > android.os.Build.VERSION_CODES.LOLLIPOP){
if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
Log.d("Activity_Request", "Wastn granted!");
// Should we show an explanation?
if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.RECORD_AUDIO)) {
Log.d("Activity_Request", "request!");
// Show an explanation to the user *asynchronously* -- don't block
// this thread waiting for the user's response! After the user
// sees the explanation, try again to request the permission.
} else {
// No explanation needed, we can request the permission.
Log.d("Activity_Request", "take!");
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO}, 1);
}
}
}
}
}
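For reference only, here is a minimal sketch (not a confirmed fix) of the SCO routing that is commonly suggested for capturing the headset microphone: switch the AudioManager to MODE_IN_COMMUNICATION before starting SCO, and only create the AudioRecord once the SCO_AUDIO_STATE_CONNECTED broadcast has arrived. The helper name startScoRouting() is hypothetical and not part of the code above.
// Sketch only: assumes the same activity and the broadcast receiver shown above.
private void startScoRouting() {
    AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
    // Many devices are reported to route the headset mic only while in
    // communication mode, not in MODE_NORMAL.
    am.setMode(AudioManager.MODE_IN_COMMUNICATION);
    am.startBluetoothSco();
    am.setBluetoothScoOn(true);
    // Do not call startRecording() here; wait for the SCO_AUDIO_STATE_CONNECTED
    // broadcast and create the AudioRecord there, keeping the 8 kHz mono PCM
    // format used above, which matches a narrow-band SCO link.
}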

Related

Synchronously Send and Receive Bluetooth Data using Kotlin Coroutines

This has been the method so far to send and receive over Bluetooth using Java with threads. But how do we do this using Kotlin's coroutines? A lot of this old Java code no longer translates cleanly to Kotlin 1.4+, especially the threading parts, and I have read that Kotlin now favours coroutines over raw threads.
public class MainActivity extends AppCompatActivity {
private static final UUID MY_UUID_INSECURE =
UUID.fromString("8ce255c0-200a-11e0-ac64-0800200c9a66");
public void pairDevice(View v) {
Set<BluetoothDevice> pairedDevices = bluetoothAdapter.getBondedDevices();
if (pairedDevices.size() > 0) {
Object[] devices = pairedDevices.toArray();
BluetoothDevice device = (BluetoothDevice) devices[0];
ConnectThread connect = new ConnectThread(device,MY_UUID_INSECURE);
connect.start();
}
}
private class ConnectThread extends Thread {
private BluetoothSocket mmSocket;
public ConnectThread(BluetoothDevice device, UUID uuid) {
mmDevice = device;
deviceUUID = uuid;
}
public void run(){
BluetoothSocket tmp = null;
// Get a BluetoothSocket for a connection with the
// given BluetoothDevice
try {
tmp = mmDevice.createRfcommSocketToServiceRecord(MY_UUID_INSECURE);
} catch (IOException e) {
}
mmSocket = tmp;
try {
// This is a blocking call and will only return on a
// successful connection or an exception
mmSocket.connect();
} catch (IOException e) {
// Close the socket
try {
mmSocket.close();
} catch (IOException e1) {
}
}
//will talk about this in the 3rd video
connected(mmSocket);
}
}
private void connected(BluetoothSocket mmSocket) {
// Start the thread to manage the connection and perform transmissions
mConnectedThread = new ConnectedThread(mmSocket);
mConnectedThread.start();
}
private class ConnectedThread extends Thread {
private final BluetoothSocket mmSocket;
private final InputStream mmInStream;
private final OutputStream mmOutStream;
public ConnectedThread(BluetoothSocket socket) {
mmSocket = socket;
InputStream tmpIn = null;
OutputStream tmpOut = null;
try {
tmpIn = mmSocket.getInputStream();
tmpOut = mmSocket.getOutputStream();
} catch (IOException e) {
e.printStackTrace();
}
mmInStream = tmpIn;
mmOutStream = tmpOut;
}
public void run(){
byte[] buffer = new byte[1024]; // buffer store for the stream
int bytes; // bytes returned from read()
// Keep listening to the InputStream until an exception occurs
while (true) {
// Read from the InputStream
try {
bytes = mmInStream.read(buffer);
final String incomingMessage = new String(buffer, 0, bytes);
runOnUiThread(new Runnable() {
@Override
public void run() {
view_data.setText(incomingMessage);
}
});
} catch (IOException e) {
Log.e(TAG, "write: Error reading Input Stream. " + e.getMessage() );
break;
}
}
}
public void write(byte[] bytes) {
String text = new String(bytes, Charset.defaultCharset());
Log.d(TAG, "write: Writing to outputstream: " + text);
try {
mmOutStream.write(bytes);
} catch (IOException e) {
}
}
}
public void SendMessage(View v) {
byte[] bytes = send_data.getText().toString().getBytes(Charset.defaultCharset());
mConnectedThread.write(bytes);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
send_data =(EditText) findViewById(R.id.editText);
view_data = (TextView) findViewById(R.id.textView);
if (bluetoothAdapter != null && !bluetoothAdapter.isEnabled()) {
Intent enableBtIntent = new
Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
startActivityForResult(enableBtIntent, REQUEST_ENABLE_BT);
}
}
public void Start_Server(View view) {
AcceptThread accept = new AcceptThread();
accept.start();
}
private class AcceptThread extends Thread {
private final BluetoothServerSocket mmServerSocket;
public AcceptThread(){
BluetoothServerSocket tmp = null ;
try{
tmp = bluetoothAdapter.listenUsingInsecureRfcommWithServiceRecord("appname", MY_UUID_INSECURE);
}catch (IOException e){
}
mmServerSocket = tmp;
}
public void run(){
Log.d(TAG, "run: AcceptThread Running.");
BluetoothSocket socket = null;
try{
// This is a blocking call and will only return on a
// successful connection or an exception
socket = mmServerSocket.accept();
}catch (IOException e){
}
//will talk about this in the 3rd video
if(socket != null){
connected(socket);
}
}
}
}

Not able to connect a Video Call - Agora.io

I am trying to make a video calling app for the first time. I am using Agora.io in Android Studio for the video calls. The problem I am facing is that I cannot see the video of the person I am calling, although my own feed from the front camera shows up perfectly.
I have been stuck on this issue for days.
Here is the code of Dashboard.java.
public class Dashboard extends AppCompatActivity {
private static final String TAG = "1";
private static final int PERMISSION_REQ_ID = 22;
// Permission WRITE_EXTERNAL_STORAGE is not mandatory
// for Agora RTC SDK, just in case if you wanna save
// logs to external sdcard.
private static final String[] REQUESTED_PERMISSIONS = {
Manifest.permission.READ_PHONE_STATE,
Manifest.permission.RECORD_AUDIO,
Manifest.permission.CAMERA,
Manifest.permission.WRITE_EXTERNAL_STORAGE
};
private RtcEngine mRtcEngine;
private boolean mCallEnd;
private boolean mMuted;
private FrameLayout mLocalContainer;
private RelativeLayout mRemoteContainer;
private SurfaceView mLocalView;
private SurfaceView mRemoteView;
private ImageView mCallBtn;
private ImageView mMuteBtn;
private ImageView mSwitchCameraBtn;
/**
* Event handler registered into RTC engine for RTC callbacks.
* Note that UI operations needs to be in UI thread because RTC
* engine deals with the events in a separate thread.
*/
private final IRtcEngineEventHandler mRtcEventHandler = new IRtcEngineEventHandler() {
@Override
public void onJoinChannelSuccess(String channel, final int uid, int elapsed) {
runOnUiThread(new Runnable() {
@Override
public void run() {
}
});
}
@Override
public void onFirstRemoteVideoDecoded(final int uid, int width, int height, int elapsed) {
runOnUiThread(new Runnable() {
@Override
public void run() {
setupRemoteVideo(uid);
}
});
}
@Override
public void onUserOffline(final int uid, int reason) {
runOnUiThread(new Runnable() {
@Override
public void run() {
onRemoteUserLeft();
}
});
}
};
private void setupRemoteVideo(int uid) {
// Only one remote video view is available for this
// tutorial. Here we check if there exists a surface
// view tagged as this uid.
int count = mRemoteContainer.getChildCount();
View view = null;
for (int i = 0; i < count; i++) {
View v = mRemoteContainer.getChildAt(i);
if (v.getTag() instanceof Integer && ((int) v.getTag()) == uid) {
view = v;
}
}
if (view != null) {
return;
}
mRemoteView = RtcEngine.CreateRendererView(getBaseContext());
mRemoteContainer.addView(mRemoteView);
mRtcEngine.setupRemoteVideo(new VideoCanvas(mRemoteView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
mRemoteView.setTag(uid);
}
private void onRemoteUserLeft() {
removeRemoteVideo();
}
private void removeRemoteVideo() {
if (mRemoteView != null) {
mRemoteContainer.removeView(mRemoteView);
}
mRemoteView = null;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_dashboard);
initUI();
// Ask for permissions at runtime.
// This is just an example set of permissions. Other permissions
// may be needed, and please refer to our online documents.
if (checkSelfPermission(REQUESTED_PERMISSIONS[0], PERMISSION_REQ_ID) &&
checkSelfPermission(REQUESTED_PERMISSIONS[1], PERMISSION_REQ_ID) &&
checkSelfPermission(REQUESTED_PERMISSIONS[2], PERMISSION_REQ_ID) &&
checkSelfPermission(REQUESTED_PERMISSIONS[3], PERMISSION_REQ_ID)) {
initEngineAndJoinChannel();
}
}
private void initUI() {
mLocalContainer = findViewById(R.id.local_video_view_container);
mRemoteContainer = findViewById(R.id.remote_video_view_container);
mCallBtn = findViewById(R.id.btn_call);
mMuteBtn = findViewById(R.id.btn_mute);
mSwitchCameraBtn = findViewById(R.id.btn_switch_camera);
}
private boolean checkSelfPermission(String permission, int requestCode) {
if (ContextCompat.checkSelfPermission(this, permission) !=
PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, REQUESTED_PERMISSIONS, requestCode);
return false;
}
return true;
}
@Override
public void onRequestPermissionsResult(int requestCode,
@NonNull String[] permissions, @NonNull int[] grantResults) {
if (requestCode == PERMISSION_REQ_ID) {
if (grantResults[0] != PackageManager.PERMISSION_GRANTED ||
grantResults[1] != PackageManager.PERMISSION_GRANTED ||
grantResults[2] != PackageManager.PERMISSION_GRANTED ||
grantResults[3] != PackageManager.PERMISSION_GRANTED) {
showLongToast("Need permissions " + Manifest.permission.RECORD_AUDIO +
"/" + Manifest.permission.CAMERA + "/" + Manifest.permission.WRITE_EXTERNAL_STORAGE
+ "/" + Manifest.permission.READ_PHONE_STATE);
finish();
return;
}
// Here we continue only if all permissions are granted.
// The permissions can also be granted in the system settings manually.
initEngineAndJoinChannel();
}
}
private void showLongToast(final String msg) {
this.runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_LONG).show();
}
});
}
private void initEngineAndJoinChannel() {
// This is our usual steps for joining
// a channel and starting a call.
initializeEngine();
setupVideoConfig();
setupLocalVideo();
joinChannel();
}
private void initializeEngine() {
try {
mRtcEngine = RtcEngine.create(getBaseContext(), getString(R.string.app_id_agora), mRtcEventHandler);
} catch (Exception e) {
Log.e(TAG, Log.getStackTraceString(e));
throw new RuntimeException("NEED TO check rtc sdk init fatal error\n" + Log.getStackTraceString(e));
}
}
private void setupVideoConfig() {
// In simple use cases, we only need to enable video capturing
// and rendering once at the initialization step.
// Note: audio recording and playing is enabled by default.
mRtcEngine.enableVideo();
// Please go to this page for detailed explanation
// https://docs.agora.io/en/Video/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_rtc_engine.html#af5f4de754e2c1f493096641c5c5c1d8f
mRtcEngine.setVideoEncoderConfiguration(new VideoEncoderConfiguration(
VideoEncoderConfiguration.VD_640x360,
VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_15,
VideoEncoderConfiguration.STANDARD_BITRATE,
VideoEncoderConfiguration.ORIENTATION_MODE.ORIENTATION_MODE_FIXED_PORTRAIT));
}
private void setupLocalVideo() {
// This is used to set a local preview.
// The steps setting local and remote view are very similar.
// But note that if the local user do not have a uid or do
// not care what the uid is, he can set his uid as ZERO.
// Our server will assign one and return the uid via the event
// handler callback function (onJoinChannelSuccess) after
// joining the channel successfully.
mLocalView = RtcEngine.CreateRendererView(getBaseContext());
mLocalView.setZOrderMediaOverlay(true);
mLocalContainer.addView(mLocalView);
mRtcEngine.setupLocalVideo(new VideoCanvas(mLocalView, VideoCanvas.RENDER_MODE_HIDDEN, 0));
}
private void joinChannel() {
// 1. Users can only see each other after they join the
// same channel successfully using the same app id.
// 2. One token is only valid for the channel name that
// you use to generate this token.
String token = "12312323123123wedsa";
mRtcEngine.joinChannel(token, "brolChannelbrobro", "Extra Optional Data", 0);
}
@Override
protected void onDestroy() {
super.onDestroy();
if (!mCallEnd) {
leaveChannel();
}
RtcEngine.destroy();
}
private void leaveChannel() {
mRtcEngine.leaveChannel();
}
public void onLocalAudioMuteClicked(View view) {
mMuted = !mMuted;
mRtcEngine.muteLocalAudioStream(mMuted);
int res = mMuted ? R.drawable.btn_mute : R.drawable.btn_unmute;
mMuteBtn.setImageResource(res);
}
public void onSwitchCameraClicked(View view) {
mRtcEngine.switchCamera();
}
public void onCallClicked(View view) {
if (mCallEnd) {
startCall();
mCallEnd = false;
mCallBtn.setImageResource(R.drawable.btn_endcall);
} else {
endCall();
mCallEnd = true;
mCallBtn.setImageResource(R.drawable.btn_startcall);
}
showButtons(!mCallEnd);
}
private void startCall() {
setupLocalVideo();
joinChannel();
}
private void endCall() {
removeLocalVideo();
removeRemoteVideo();
leaveChannel();
}
private void removeLocalVideo() {
if (mLocalView != null) {
mLocalContainer.removeView(mLocalView);
}
mLocalView = null;
}
private void showButtons(boolean show) {
int visibility = show ? View.VISIBLE : View.GONE;
mMuteBtn.setVisibility(visibility);
mSwitchCameraBtn.setVisibility(visibility);
}
}
I had the same issue. In my case it was a layout problem: I wasn't setting the local video view to GONE and the remote video view to VISIBLE. I don't know if it still helps after all these years.
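A sketch of what that layout fix can look like, assuming the mLocalContainer and mRemoteContainer fields from the question's initUI(); where exactly to toggle the visibility (for example inside setupRemoteVideo()) depends on the app, and the helper name below is made up for illustration.
// Hypothetical helper: once the remote view is attached, show the remote
// container and hide (or shrink) the local preview.
private void showRemoteVideoFullScreen() {
    mRemoteContainer.setVisibility(View.VISIBLE);
    mLocalContainer.setVisibility(View.GONE); // or keep it as a small overlay
}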

Yet another "Can't create handler inside thread that has not called Looper.prepare()" topic

I have an Activity that checks for an internet connection when it starts. If there is a connection, life goes on; otherwise a dialog appears asking the user to turn the connection on. I also made a thread that checks the connection every 10 seconds and shows the dialog again if the connection was lost.
package greensmartcampus.eu.smartcampususerfeedbackapp;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.provider.Settings;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.Toast;
import java.net.InetAddress;
public class HomeScreen extends AbstractPortraitActivity {
private static final int WIFI_REQUEST_CODE = 1;
private boolean networkSettingsDialogOpened = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home_screen);
this.runOnUiThread(new Runnable() {
@Override
public void run() {
while (!HomeScreen.this.isInternetAvailable()) {
if (!networkSettingsDialogOpened)
HomeScreen.this.createNetErrorDialog();
try {
Thread.sleep(10000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
});
}
(...)
private boolean isInternetAvailable() {
try {
final InetAddress ipAddr = InetAddress.getByName("google.com");
if (ipAddr.equals("")) {
return false;
} else {
return true;
}
} catch (Exception e) {
return false;
}
}
private void createNetErrorDialog() {
networkSettingsDialogOpened = true;
final AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setMessage("You need a network connection to use this application. Please turn on mobile network or Wi-Fi in Settings.")
.setTitle("Unable to connect")
.setCancelable(false)
.setPositiveButton("Settings",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
Intent i = new Intent(Settings.ACTION_WIRELESS_SETTINGS);
startActivityForResult(i, WIFI_REQUEST_CODE);
}
}
)
.setNegativeButton("Cancel",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
HomeScreen.this.finish();
}
}
);
final AlertDialog alert = builder.create();
alert.show();
}
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == WIFI_REQUEST_CODE) {
if (resultCode == RESULT_OK) {
networkSettingsDialogOpened = false;
Toast.makeText(HomeScreen.this, "Returned Ok",
Toast.LENGTH_LONG).show();
}
if (resultCode == RESULT_CANCELED) {
networkSettingsDialogOpened = false;
Toast.makeText(HomeScreen.this, "Returned Canceled",
Toast.LENGTH_LONG).show();
}
}
}
}
However I am getting the following error:
02-03 18:13:14.525 2683-2699/greensmartcampus.eu.smartcampususerfeedbackapp E/AndroidRuntime﹕ FATAL EXCEPTION: Thread-193
Process: greensmartcampus.eu.smartcampususerfeedbackapp, PID: 2683
java.lang.RuntimeException: Can't create handler inside thread that has not called Looper.prepare()
at android.os.Handler.<init>(Handler.java:200)
at android.os.Handler.<init>(Handler.java:114)
at android.app.Dialog.<init>(Dialog.java:108)
at android.app.AlertDialog.<init>(AlertDialog.java:125)
at android.app.AlertDialog$Builder.create(AlertDialog.java:967)
at greensmartcampus.eu.smartcampususerfeedbackapp.HomeScreen.createNetErrorDialog(HomeScreen.java:97)
at greensmartcampus.eu.smartcampususerfeedbackapp.HomeScreen.access$200(HomeScreen.java:15)
at greensmartcampus.eu.smartcampususerfeedbackapp.HomeScreen$1.run(HomeScreen.java:29)
Note: Line 97 is the one containing:
final AlertDialog alert = builder.create();
I googled a lot, and I am already using the cliché answer of runOnUiThread, but it doesn't fix it.
What am I missing?
With the way you are checking the internet connection, you are putting your UI thread to sleep. You should do it like this.
Create one Handler and Thread running flag:
Handler mHandler = new Handler();
boolean isRunning = true;
Then start this thread from your onCreate() method:
new Thread(new Runnable() {
@Override
public void run() {
while (isRunning) {
try {
Thread.sleep(10000);
mHandler.post(new Runnable() {
@Override
public void run() {
if(!HomeScreen.this.isInternetAvailable()){
if (!networkSettingsDialogOpened)
HomeScreen.this.createNetErrorDialog();
}
}
});
} catch (Exception e) {
}
}
}
}).start();
Change this method slightly:
private boolean isInternetAvailable() {
try {
final InetAddress ipAddr = InetAddress.getByName("google.com");
if (ipAddr.equals("")) {
return false;
} else {
isRunning = true;
return true;
}
} catch (Exception e) {
return false;
}
}
You can't call Thread.sleep() from code that is running on the UI thread. This is your code:
this.runOnUiThread(new Runnable() {
@Override
public void run() {
while (!HomeScreen.this.isInternetAvailable()) {
if (!networkSettingsDialogOpened)
HomeScreen.this.createNetErrorDialog();
try {
Thread.sleep(10000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
});
You just need to run the bit of code that shows the Dialog on the UI thread. Try this instead:
new Thread(new Runnable() {
@Override
public void run() {
while (!HomeScreen.this.isInternetAvailable()) {
if (!networkSettingsDialogOpened)
// Show the Dialog on the UI thread
HomeScreen.this.runOnUiThread(new Runnable() {
@Override
public void run() {
HomeScreen.this.createNetErrorDialog();
}
});
try {
Thread.sleep(10000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}).start();

dialog.show() crashes my application, why?

I'm new to Android.
I'd like to do things when the color reaches a certain value, for example show an alert when r is bigger than 30, but the application crashes. Thanks in advance for any simple answers.
public class MainActivity extends Activity {
private AlertDialog dialog;
private AlertDialog.Builder builder;
private BackgroundColors view;
public class BackgroundColors extends SurfaceView implements Runnable {
public int grand=0;
public int step=0;
private boolean flip=true;
private Thread thread;
private boolean running;
private SurfaceHolder holder;
public BackgroundColors(Context context) {
super(context);
}
Inside this loop, while running is true, is it impossible to show dialogs?
public void run() {
int r = 0;
while (running){
if (holder.getSurface().isValid()){
Canvas canvas = holder.lockCanvas();
if (r > 250)
r = 0;
r += 10;
if (r>30 && flip){
flip=false;
// *********************************
dialog.show();
// *********************************
// CRASH !!
}
try {
Thread.sleep(300);
}
catch(InterruptedException e) {
e.printStackTrace();
}
canvas.drawARGB(255, r, 255, 255);
holder.unlockCanvasAndPost(canvas);
}
}
}
public void start() {
running = true;
thread = new Thread(this);
holder = this.getHolder();
thread.start();
}
public void stop() {
running = false;
boolean retry = true;
while (retry){
try {
thread.join();
retry = false;
}
catch(InterruptedException e) {
retry = true;
}
}
}
public boolean onTouchEvent(MotionEvent e){
dialog.show();
return false;
}
protected void onSizeChanged(int xNew, int yNew, int xOld, int yOld){
super.onSizeChanged(xNew, yNew, xOld, yOld);
grand = xNew;
step =grand/15;
}
}
public void onCreate(Bundle b) {
super.onCreate(b);
view = new BackgroundColors(this);
this.setContentView(view);
builder = new AlertDialog.Builder(this);
builder.setMessage("ciao");
builder.setPositiveButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
Log.d("Basic", "It worked");
}
});
dialog = builder.create();
}
public void onPause(){
super.onPause();
view.stop();
}
public void onResume(){
super.onResume();
view.start();
}
}
You can't show a dialog from a background thread; you should use a Handler for this. Create a Handler on the main thread and hand it to your thread. Then, instead of calling dialog.show() in your thread, send a message to the handler and call dialog.show() in the handler's handleMessage() method.
Example:
Handler handler = new Handler(){
@Override
public void handleMessage(Message msg) {
switch(msg.what) {
case 1:
dialog.show();
break;
}}};
and send the message from your thread:
handler.sendEmptyMessage(1);
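Put together with the question's classes, that could look roughly like the sketch below; the field and message names are illustrative, and the Handler is bound explicitly to the main looper so it does not matter where it is constructed.
// In MainActivity: the handler owns the dialog and runs on the main thread.
private final Handler dialogHandler = new Handler(Looper.getMainLooper()) {
    @Override
    public void handleMessage(Message msg) {
        if (msg.what == 1) {
            dialog.show();
        }
    }
};
// In BackgroundColors.run(), instead of calling dialog.show() directly:
if (r > 30 && flip) {
    flip = false;
    dialogHandler.sendEmptyMessage(1); // hops back to the main thread
}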

Voice or Audio player for .amr file in Java ME

I am working on audio recording in Nokia S40 series mobiles. I am able to record the message, but I am not able to play the recorded audio message.
Can anyone help me write a voice player for the recorded .amr audio files?
Has anyone come across this issue?
Here is my working example of recording and playing sound:
public class VoiceRecordMidlet extends MIDlet {
private Display display;
public void startApp() {
display = Display.getDisplay(this);
display.setCurrent(new VoiceRecordForm());
}
public void pauseApp() {
}
public void destroyApp(boolean unconditional) {
notifyDestroyed();
}
}
class VoiceRecordForm extends Form implements CommandListener {
private StringItem message;
private StringItem errormessage;
private final Command record, play;
private Player player;
private byte[] recordedAudioArray = null;
public VoiceRecordForm() {
super("Recording Audio");
message = new StringItem("", "Select Record to start recording.");
this.append(message);
errormessage = new StringItem("", "");
this.append(errormessage);
record = new Command("Record", Command.OK, 0);
this.addCommand(record);
play = new Command("Play", Command.BACK, 0);
this.addCommand(play);
this.setCommandListener(this);
}
public void commandAction(Command comm, Displayable disp) {
if (comm == record) {
Thread t = new Thread() {
public void run() {
try {
player = Manager.createPlayer("capture://audio");
player.realize();
RecordControl rc = (RecordControl) player.getControl("RecordControl");
ByteArrayOutputStream output = new ByteArrayOutputStream();
rc.setRecordStream(output);
rc.startRecord();
player.start();
message.setText("Recording...");
Thread.sleep(5000);
message.setText("Recording Done!");
rc.commit();
recordedAudioArray = output.toByteArray();
player.close();
} catch (Exception e) {
errormessage.setLabel("Error");
errormessage.setText(e.toString());
}
}
};
t.start();
}
else if (comm == play) {
try {
ByteArrayInputStream recordedInputStream = new ByteArrayInputStream(recordedAudioArray);
Player p2 = Manager.createPlayer(recordedInputStream, "audio/basic");
p2.prefetch();
p2.start();
} catch (Exception e) {
errormessage.setLabel("Error");
errormessage.setText(e.toString());
}
}
}
}
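One hedged note on the playback half, since it depends on the handset: on many S40 devices the capture above produces AMR data, and "audio/basic" may not decode it. If Manager.getSupportedContentTypes(null) lists "audio/amr", it is worth creating the player with that content type instead, roughly as below (assuming access to the recordedAudioArray field from the form above).
// Sketch only: play the captured bytes back as AMR if the device supports it.
ByteArrayInputStream in = new ByteArrayInputStream(recordedAudioArray);
Player amrPlayer = Manager.createPlayer(in, "audio/amr");
amrPlayer.realize();
amrPlayer.prefetch();
amrPlayer.start();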
