I have an audio clip in my MainActivity that plays every time the activity is entered. I want it to play only once, regardless of whether you switch screens and come back! It should play only the first time MainActivity is entered, and stay silent on every visit after that.
" '#SuppressLint("SetTextI18n")
private void getTimeFromAndroid() {
Calendar c = Calendar.getInstance();
timeOfDay = c.get(Calendar.HOUR_OF_DAY);
if(timeOfDay >= 0 && timeOfDay < 12) {
setBrightness(brilhoManha + 155);
saudacao.setText("BOM DIA, SEJA BEM-VINDO");
new Timer().schedule(new TimerTask() {
#Override
public void run() {
if (!flag) {
play = MediaPlayer.create(MainActivity.this, R.raw.audiobomdia);
play.start();
}
}
}, 2000);
}else if(timeOfDay >= 12 && timeOfDay < 18){
setBrightness(brilhoManha + 155);
saudacao.setText("BOA TARDE, SEJA BEM-VINDO");
new Timer().schedule(new TimerTask() {
#Override
public void run() {
if (!flag) {
play = MediaPlayer.create(MainActivity.this, R.raw.audioboatarde);
viewPager = findViewById(R.id.main);
}
}
}, 2000);
}else if(timeOfDay >= 18 && timeOfDay < 24){
saudacao.setText("BOA NOITE, SEJA BEM-VINDO");
setBrightness(brilhoNoite + 60);
new Timer().schedule(new TimerTask() {
#Override
public void run() {
if (!flag) {
play = MediaPlayer.create(MainActivity.this, R.raw.audioboanoite);
viewPager = findViewById(R.id.main);
}
}
}, 2000);
}else if(timeOfDay >= 0 && timeOfDay < 6){
saudacao.setText("BOA NOITE, SEJA BEM-VINDO");
setBrightness(brilhoNoite+60);
new Timer().schedule(new TimerTask() {
#Override
public void run() {
if (!flag) {
play = MediaPlayer.create(MainActivity.this, R.raw.audioboanoite);
viewPager = findViewById(R.id.main);
}
}
}, 2000);
}
} '"
This is the code in my MainActivity; it plays an audio clip and shows a greeting message according to the time of day.
EDIT:
Remove public int status = 0 from MainActivity. Then create a class G and add public static int status = 0; it must be static:
public class G extends Application {
    public static int status = 0;
    public static SharedPreferences preferences;

    @Override
    public void onCreate() {
        super.onCreate();
        // If you also want it to stay silent after the app
        // is exited and restarted, add these two lines:
        preferences = PreferenceManager.getDefaultSharedPreferences(this);
        status = preferences.getInt("STATUS", 0);
    }
}
Then add android:name="your.package.name.G" to the application block of your manifest, like this:
<application
    android:name="your.package.name.G"
    android:icon="@drawable/ic_launcher"
    android:label="@string/app_name"
    android:theme="@style/AppTheme" >
Then add this wherever the audio is played in MainActivity:
if (G.status == 0) {
    // code that plays the audio
    G.status = 1;
}
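For example, applied to the morning branch of getTimeFromAndroid() above, the guarded playback might look like this (a sketch; it assumes G is registered in the manifest as shown):

new Timer().schedule(new TimerTask() {
    @Override
    public void run() {
        if (G.status == 0) {
            play = MediaPlayer.create(MainActivity.this, R.raw.audiobomdia);
            play.start();
            G.status = 1; // never plays again for this app process
        }
    }
}, 2000);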
If you want it to stay silent even after exiting the app, also add this method to MainActivity (it is an Activity override, not something inside onCreate):
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
    if (keyCode == KeyEvent.KEYCODE_BACK) {
        G.preferences.edit().putInt("STATUS", G.status).apply();
    }
    return super.onKeyDown(keyCode, event);
}
I have been trying to implement wake-word detection, so that the application reacts to a phrase like "Hey Google" or "Jarvis". After some research I found that Porcupine solves the wake-word part, but now I can't trigger startRecognition() to listen for the user's input again and carry on from there. When I tried calling startRecognition() anyway, it asked me to call speechRecognizer.destroy(), which I did from the service's onDestroy method along with the Porcupine cleanup, but then it stopped working altogether. Sorry if I've confused anyone; my code is attached below, and I would really appreciate everyone's help, as I have been trying to solve this for a while now.
Another question: what does the following line of code do?
PendingIntent contentIntent = PendingIntent.getActivity(
        this,
        0,
        new Intent(this, MainActivity.class), // this line?
        0);
Here is the current code:
public class PorcupineService extends Service {
private static final int REQUEST_RECORD_AUDIO_PERMISSION_CODE = 1;
private SpeechRecognizer speechRecognizer;
TextToSpeech textToSpeech;
String userResponse;
Float speechRate = 2f;
private static final String CHANNEL_ID = "PorcupineServiceChannel";
private PorcupineManager porcupineManager;
private int numUtterances;
private void createNotificationChannel() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
NotificationChannel notificationChannel = new NotificationChannel(
CHANNEL_ID,
"Porcupine",
NotificationManager.IMPORTANCE_HIGH);
NotificationManager manager = getSystemService(NotificationManager.class);
manager.createNotificationChannel(notificationChannel);
}
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
createNotificationChannel();
PendingIntent pendingIntent = PendingIntent.getActivity(
this,
0,
new Intent(this, MainActivity.class),
0);
numUtterances = 0;
Notification notification = new NotificationCompat.Builder(this, CHANNEL_ID)
.setContentTitle("Wake word")
.setContentText("Service running")
.setSmallIcon(R.drawable.ic_launcher_foreground)
.setContentIntent(pendingIntent)
.build();
startForeground(1234, notification);
try {
porcupineManager = new PorcupineManager.Builder()
.setKeyword(Porcupine.BuiltInKeyword.JARVIS)
.setSensitivity(0.7f).build(
getApplicationContext(),
(keywordIndex) -> {
Log.i("YOU SAID IT!", "yesss");
textSpeechInitialize();
startRecognition();
listening();
numUtterances++;
PendingIntent contentIntent = PendingIntent.getActivity(
this,
0,
new Intent(this, MainActivity.class),
0);
final String contentText = numUtterances == 1 ? " time!" : " times!";
Notification n = new NotificationCompat.Builder(this, CHANNEL_ID)
.setContentTitle("Wake word")
.setContentText("Detected " + numUtterances + contentText)
.setSmallIcon(R.drawable.ic_launcher_background)
.setContentIntent(contentIntent)
.build();
NotificationManager notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
assert notificationManager != null;
notificationManager.notify(1234, n);
});
porcupineManager.start();
} catch (PorcupineException e) {
Log.e("PORCUPINE", e.toString());
}
return super.onStartCommand(intent, flags, startId);
}
@Nullable
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public void onDestroy() {
try {
porcupineManager.stop();
porcupineManager.delete();
speechRecognizer.destroy();
} catch (PorcupineException e) {
Log.e("PORCUPINE", e.toString());
}
super.onDestroy();
}
public void listening(){
speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
speechRecognizer.setRecognitionListener(new RecognitionListener() {
@Override
public void onReadyForSpeech(Bundle params) {
}
@Override
public void onBeginningOfSpeech() {}
@Override
public void onRmsChanged(float rmsdB) {}
@Override
public void onBufferReceived(byte[] buffer) {}
@Override
public void onEndOfSpeech() {}
@Override
public void onError(int error) {
String errorMessage = getErrorText(error);
Log.i(">>> INFO", "Failed " + errorMessage);
}
@Override
public void onResults(Bundle results) {
ArrayList<String> matches = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
userResponse = matches.get(0);
userResponse = userResponse.toLowerCase();
toSpeak(userResponse);
}
@Override
public void onPartialResults(Bundle partialResults) {}
@Override
public void onEvent(int eventType, Bundle params) {}
});
}
public void textSpeechInitialize(){
textToSpeech = new TextToSpeech(getApplicationContext(), new TextToSpeech.OnInitListener() {
@Override
public void onInit(int status) {
if (status == TextToSpeech.SUCCESS){
textToSpeech.setLanguage(Locale.getDefault());
textToSpeech.setSpeechRate(speechRate);
String greet = greetings();
toSpeak(greet);
startRecognition();
} else {
Toast.makeText(getApplicationContext(), "Feature not supported", Toast.LENGTH_SHORT).show();
}
}
});
}
public String getErrorText(int errorCode) {
String message;
switch (errorCode) {
...
}
return message;
}
public static String greetings(){
String s = "";
Calendar c = Calendar.getInstance();
int time = c.get(Calendar.HOUR_OF_DAY);
if (time >= 0 && time < 12){
s = "Good Morning sir! how can I help you today?";
} else if (time >= 12 && time < 16){
s = "Good Afternoon sir";
} else if (time >= 16 && time < 22){
s = "Good Evening sir";
}
else if (time >= 22 && time < 24){
s = "Hello sir, you need to take some rest... its getting late!";
}
return s;
}
private void startRecognition() {
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getPackageName());
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en");
speechRecognizer.startListening(intent);
}
private void toSpeak(String toSpeak){
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
Log.i(">>>Voice Info", String.valueOf(textToSpeech.getVoice()));
}
try {
textToSpeech.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
} catch (Exception e){
e.printStackTrace();
}
}
}
What changes do I need to make to the following code to get an m3u8 link to play?
I'm able to get regular MP4 videos to work, but not HLS. What would I need to do to make HLS links work?
I would like to implement something like this, which allows playback of different media source types: https://gist.github.com/navi25/7ab41931eb52bbcb693b5599e6955245#file-mediasourcebuilder-kt
public class VideoPlayerRecyclerView extends RecyclerView {
private static final String TAG = "VideoPlayerRecyclerView";
private enum VolumeState {ON, OFF};
// ui
private ImageView thumbnail, volumeControl;
private ProgressBar progressBar;
private View viewHolderParent;
private FrameLayout frameLayout;
private PlayerView videoSurfaceView;
private SimpleExoPlayer videoPlayer;
// vars
private ArrayList<MediaObject> mediaObjects = new ArrayList<>();
private int videoSurfaceDefaultHeight = 0;
private int screenDefaultHeight = 0;
private Context context;
private int playPosition = -1;
private boolean isVideoViewAdded;
private RequestManager requestManager;
// controlling playback state
private VolumeState volumeState;
public VideoPlayerRecyclerView(@NonNull Context context) {
super(context);
init(context);
}
public VideoPlayerRecyclerView(@NonNull Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
init(context);
}
private void init(Context context){
this.context = context.getApplicationContext();
Display display = ((WindowManager) getContext().getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
Point point = new Point();
display.getSize(point);
videoSurfaceDefaultHeight = point.x;
screenDefaultHeight = point.y;
videoSurfaceView = new PlayerView(this.context);
videoSurfaceView.setResizeMode(AspectRatioFrameLayout.RESIZE_MODE_ZOOM);
BandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
TrackSelection.Factory videoTrackSelectionFactory =
new AdaptiveTrackSelection.Factory(bandwidthMeter);
TrackSelector trackSelector =
new DefaultTrackSelector(videoTrackSelectionFactory);
// 2. Create the player
videoPlayer = ExoPlayerFactory.newSimpleInstance(context, trackSelector);
// Bind the player to the view.
videoSurfaceView.setUseController(false);
videoSurfaceView.setPlayer(videoPlayer);
setVolumeControl(VolumeState.ON);
addOnScrollListener(new RecyclerView.OnScrollListener() {
@Override
public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
super.onScrollStateChanged(recyclerView, newState);
if (newState == RecyclerView.SCROLL_STATE_IDLE) {
Log.d(TAG, "onScrollStateChanged: called.");
if(thumbnail != null){ // show the old thumbnail
thumbnail.setVisibility(VISIBLE);
}
// There's a special case when the end of the list has been reached.
// Need to handle that with this bit of logic
if(!recyclerView.canScrollVertically(1)){
playVideo(true);
}
else{
playVideo(false);
}
}
}
@Override
public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
super.onScrolled(recyclerView, dx, dy);
}
});
addOnChildAttachStateChangeListener(new OnChildAttachStateChangeListener() {
@Override
public void onChildViewAttachedToWindow(View view) {
}
@Override
public void onChildViewDetachedFromWindow(View view) {
if (viewHolderParent != null && viewHolderParent.equals(view)) {
resetVideoView();
}
}
});
videoPlayer.addListener(new Player.EventListener() {
@Override
public void onTimelineChanged(Timeline timeline, @Nullable Object manifest, int reason) {
}
@Override
public void onTracksChanged(TrackGroupArray trackGroups, TrackSelectionArray trackSelections) {
}
@Override
public void onLoadingChanged(boolean isLoading) {
}
@Override
public void onPlayerStateChanged(boolean playWhenReady, int playbackState) {
switch (playbackState) {
case Player.STATE_BUFFERING:
Log.e(TAG, "onPlayerStateChanged: Buffering video.");
if (progressBar != null) {
progressBar.setVisibility(VISIBLE);
}
break;
case Player.STATE_ENDED:
Log.d(TAG, "onPlayerStateChanged: Video ended.");
videoPlayer.seekTo(0);
break;
case Player.STATE_IDLE:
break;
case Player.STATE_READY:
Log.e(TAG, "onPlayerStateChanged: Ready to play.");
if (progressBar != null) {
progressBar.setVisibility(GONE);
}
if(!isVideoViewAdded){
addVideoView();
}
break;
default:
break;
}
}
@Override
public void onRepeatModeChanged(int repeatMode) {
}
@Override
public void onShuffleModeEnabledChanged(boolean shuffleModeEnabled) {
}
@Override
public void onPlayerError(ExoPlaybackException error) {
}
@Override
public void onPositionDiscontinuity(int reason) {
}
@Override
public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
}
@Override
public void onSeekProcessed() {
}
});
}
public void playVideo(boolean isEndOfList) {
int targetPosition;
if(!isEndOfList){
int startPosition = ((LinearLayoutManager) getLayoutManager()).findFirstVisibleItemPosition();
int endPosition = ((LinearLayoutManager) getLayoutManager()).findLastVisibleItemPosition();
// if there is more than 2 list-items on the screen, set the difference to be 1
if (endPosition - startPosition > 1) {
endPosition = startPosition + 1;
}
// something is wrong. return.
if (startPosition < 0 || endPosition < 0) {
return;
}
// if there is more than 1 list-item on the screen
if (startPosition != endPosition) {
int startPositionVideoHeight = getVisibleVideoSurfaceHeight(startPosition);
int endPositionVideoHeight = getVisibleVideoSurfaceHeight(endPosition);
targetPosition = startPositionVideoHeight > endPositionVideoHeight ? startPosition : endPosition;
}
else {
targetPosition = startPosition;
}
}
else{
targetPosition = mediaObjects.size() - 1;
}
Log.d(TAG, "playVideo: target position: " + targetPosition);
// video is already playing so return
if (targetPosition == playPosition) {
return;
}
// set the position of the list-item that is to be played
playPosition = targetPosition;
if (videoSurfaceView == null) {
return;
}
// remove any old surface views from previously playing videos
videoSurfaceView.setVisibility(INVISIBLE);
removeVideoView(videoSurfaceView);
int currentPosition = targetPosition - ((LinearLayoutManager) getLayoutManager()).findFirstVisibleItemPosition();
View child = getChildAt(currentPosition);
if (child == null) {
return;
}
VideoPlayerViewHolder holder = (VideoPlayerViewHolder) child.getTag();
if (holder == null) {
playPosition = -1;
return;
}
thumbnail = holder.thumbnail;
progressBar = holder.progressBar;
volumeControl = holder.volumeControl;
viewHolderParent = holder.itemView;
requestManager = holder.requestManager;
frameLayout = holder.itemView.findViewById(R.id.media_container);
videoSurfaceView.setPlayer(videoPlayer);
viewHolderParent.setOnClickListener(videoViewClickListener);
DataSource.Factory dataSourceFactory = new DefaultDataSourceFactory(
context, Util.getUserAgent(context, "RecyclerView VideoPlayer"));
String mediaUrl = mediaObjects.get(targetPosition).getMedia_url();
if (mediaUrl != null) {
MediaSource videoSource = new ExtractorMediaSource.Factory(dataSourceFactory)
.createMediaSource(Uri.parse(mediaUrl));
videoPlayer.prepare(videoSource);
videoPlayer.setPlayWhenReady(true);
}
}
private OnClickListener videoViewClickListener = new OnClickListener() {
@Override
public void onClick(View v) {
toggleVolume();
}
};
/**
 * Returns the visible region of the video surface on the screen;
 * if part of it is cut off, the result is less than videoSurfaceDefaultHeight.
 * @param playPosition
 * @return the visible height in pixels
 */
private int getVisibleVideoSurfaceHeight(int playPosition) {
int at = playPosition - ((LinearLayoutManager) getLayoutManager()).findFirstVisibleItemPosition();
Log.d(TAG, "getVisibleVideoSurfaceHeight: at: " + at);
View child = getChildAt(at);
if (child == null) {
return 0;
}
int[] location = new int[2];
child.getLocationInWindow(location);
if (location[1] < 0) {
return location[1] + videoSurfaceDefaultHeight;
} else {
return screenDefaultHeight - location[1];
}
}
// Remove the old player
private void removeVideoView(PlayerView videoView) {
ViewGroup parent = (ViewGroup) videoView.getParent();
if (parent == null) {
return;
}
int index = parent.indexOfChild(videoView);
if (index >= 0) {
parent.removeViewAt(index);
isVideoViewAdded = false;
viewHolderParent.setOnClickListener(null);
}
}
private void addVideoView(){
frameLayout.addView(videoSurfaceView);
isVideoViewAdded = true;
videoSurfaceView.requestFocus();
videoSurfaceView.setVisibility(VISIBLE);
videoSurfaceView.setAlpha(1);
thumbnail.setVisibility(GONE);
}
private void resetVideoView(){
if(isVideoViewAdded){
removeVideoView(videoSurfaceView);
playPosition = -1;
videoSurfaceView.setVisibility(INVISIBLE);
thumbnail.setVisibility(VISIBLE);
}
}
public void releasePlayer() {
if (videoPlayer != null) {
videoPlayer.release();
videoPlayer = null;
}
viewHolderParent = null;
}
private void toggleVolume() {
if (videoPlayer != null) {
if (volumeState == VolumeState.OFF) {
Log.d(TAG, "togglePlaybackState: enabling volume.");
setVolumeControl(VolumeState.ON);
} else if(volumeState == VolumeState.ON) {
Log.d(TAG, "togglePlaybackState: disabling volume.");
setVolumeControl(VolumeState.OFF);
}
}
}
private void setVolumeControl(VolumeState state){
volumeState = state;
if(state == VolumeState.OFF){
videoPlayer.setVolume(0f);
animateVolumeControl();
}
else if(state == VolumeState.ON){
videoPlayer.setVolume(1f);
animateVolumeControl();
}
}
private void animateVolumeControl(){
if(volumeControl != null){
volumeControl.bringToFront();
if(volumeState == VolumeState.OFF){
requestManager.load(R.drawable.ic_volume_off_grey_24dp)
.into(volumeControl);
}
else if(volumeState == VolumeState.ON){
requestManager.load(R.drawable.ic_volume_up_grey_24dp)
.into(volumeControl);
}
volumeControl.animate().cancel();
volumeControl.setAlpha(1f);
volumeControl.animate()
.alpha(0f)
.setDuration(600).setStartDelay(1000);
}
}
public void setMediaObjects(ArrayList<MediaObject> mediaObjects){
this.mediaObjects = mediaObjects;
}
}
You need to use HlsMediaSource instead of the default MediaSource for HLS. See the ExoPlayer documentation on media sources.
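A minimal sketch of that change inside playVideo(), assuming the exoplayer-hls module is on the classpath (the .m3u8 suffix check is a simplification; a switch on Util.inferContentType() would be more robust):

import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.hls.HlsMediaSource;

// inside playVideo(), replacing the ExtractorMediaSource block:
String mediaUrl = mediaObjects.get(targetPosition).getMedia_url();
if (mediaUrl != null) {
    Uri uri = Uri.parse(mediaUrl);
    MediaSource videoSource;
    if (mediaUrl.endsWith(".m3u8")) {
        // HLS playlists need HlsMediaSource; ExtractorMediaSource only
        // handles progressive files such as MP4.
        videoSource = new HlsMediaSource.Factory(dataSourceFactory)
                .createMediaSource(uri);
    } else {
        videoSource = new ExtractorMediaSource.Factory(dataSourceFactory)
                .createMediaSource(uri);
    }
    videoPlayer.prepare(videoSource);
    videoPlayer.setPlayWhenReady(true);
}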
I am trying to make a video calling app for the first time, using Agora.io in Android Studio. The problem I am facing is that I cannot see the video of the person I am calling, although I can see my own feed from the front camera perfectly.
I have been stuck on this issue for days.
Here is the code of Dashboard.java.
public class Dashboard extends AppCompatActivity {
private static final String TAG = "1";
private static final int PERMISSION_REQ_ID = 22;
// Permission WRITE_EXTERNAL_STORAGE is not mandatory
// for Agora RTC SDK, just in case if you wanna save
// logs to external sdcard.
private static final String[] REQUESTED_PERMISSIONS = {
Manifest.permission.READ_PHONE_STATE,
Manifest.permission.RECORD_AUDIO,
Manifest.permission.CAMERA,
Manifest.permission.WRITE_EXTERNAL_STORAGE
};
private RtcEngine mRtcEngine;
private boolean mCallEnd;
private boolean mMuted;
private FrameLayout mLocalContainer;
private RelativeLayout mRemoteContainer;
private SurfaceView mLocalView;
private SurfaceView mRemoteView;
private ImageView mCallBtn;
private ImageView mMuteBtn;
private ImageView mSwitchCameraBtn;
/**
* Event handler registered into RTC engine for RTC callbacks.
* Note that UI operations need to run on the UI thread, because the RTC
* engine handles events on a separate thread.
*/
private final IRtcEngineEventHandler mRtcEventHandler = new IRtcEngineEventHandler() {
@Override
public void onJoinChannelSuccess(String channel, final int uid, int elapsed) {
runOnUiThread(new Runnable() {
@Override
public void run() {
}
});
}
@Override
public void onFirstRemoteVideoDecoded(final int uid, int width, int height, int elapsed) {
runOnUiThread(new Runnable() {
@Override
public void run() {
setupRemoteVideo(uid);
}
});
}
@Override
public void onUserOffline(final int uid, int reason) {
runOnUiThread(new Runnable() {
@Override
public void run() {
onRemoteUserLeft();
}
});
}
};
private void setupRemoteVideo(int uid) {
// Only one remote video view is available for this
// tutorial. Here we check if there exists a surface
// view tagged as this uid.
int count = mRemoteContainer.getChildCount();
View view = null;
for (int i = 0; i < count; i++) {
View v = mRemoteContainer.getChildAt(i);
if (v.getTag() instanceof Integer && ((int) v.getTag()) == uid) {
view = v;
}
}
if (view != null) {
return;
}
mRemoteView = RtcEngine.CreateRendererView(getBaseContext());
mRemoteContainer.addView(mRemoteView);
mRtcEngine.setupRemoteVideo(new VideoCanvas(mRemoteView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
mRemoteView.setTag(uid);
}
private void onRemoteUserLeft() {
removeRemoteVideo();
}
private void removeRemoteVideo() {
if (mRemoteView != null) {
mRemoteContainer.removeView(mRemoteView);
}
mRemoteView = null;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_dashboard);
initUI();
// Ask for permissions at runtime.
// This is just an example set of permissions. Other permissions
// may be needed, and please refer to our online documents.
if (checkSelfPermission(REQUESTED_PERMISSIONS[0], PERMISSION_REQ_ID) &&
checkSelfPermission(REQUESTED_PERMISSIONS[1], PERMISSION_REQ_ID) &&
checkSelfPermission(REQUESTED_PERMISSIONS[2], PERMISSION_REQ_ID) &&
checkSelfPermission(REQUESTED_PERMISSIONS[3], PERMISSION_REQ_ID)) {
initEngineAndJoinChannel();
}
}
private void initUI() {
mLocalContainer = findViewById(R.id.local_video_view_container);
mRemoteContainer = findViewById(R.id.remote_video_view_container);
mCallBtn = findViewById(R.id.btn_call);
mMuteBtn = findViewById(R.id.btn_mute);
mSwitchCameraBtn = findViewById(R.id.btn_switch_camera);
}
private boolean checkSelfPermission(String permission, int requestCode) {
if (ContextCompat.checkSelfPermission(this, permission) !=
PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, REQUESTED_PERMISSIONS, requestCode);
return false;
}
return true;
}
@Override
public void onRequestPermissionsResult(int requestCode,
@NonNull String[] permissions, @NonNull int[] grantResults) {
if (requestCode == PERMISSION_REQ_ID) {
if (grantResults[0] != PackageManager.PERMISSION_GRANTED ||
grantResults[1] != PackageManager.PERMISSION_GRANTED ||
grantResults[2] != PackageManager.PERMISSION_GRANTED ||
grantResults[3] != PackageManager.PERMISSION_GRANTED) {
showLongToast("Need permissions " + Manifest.permission.RECORD_AUDIO +
"/" + Manifest.permission.CAMERA + "/" + Manifest.permission.WRITE_EXTERNAL_STORAGE
+ "/" + Manifest.permission.READ_PHONE_STATE);
finish();
return;
}
// Here we continue only if all permissions are granted.
// The permissions can also be granted in the system settings manually.
initEngineAndJoinChannel();
}
}
private void showLongToast(final String msg) {
this.runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_LONG).show();
}
});
}
private void initEngineAndJoinChannel() {
// This is our usual steps for joining
// a channel and starting a call.
initializeEngine();
setupVideoConfig();
setupLocalVideo();
joinChannel();
}
private void initializeEngine() {
try {
mRtcEngine = RtcEngine.create(getBaseContext(), getString(R.string.app_id_agora), mRtcEventHandler);
} catch (Exception e) {
Log.e(TAG, Log.getStackTraceString(e));
throw new RuntimeException("NEED TO check rtc sdk init fatal error\n" + Log.getStackTraceString(e));
}
}
private void setupVideoConfig() {
// In simple use cases, we only need to enable video capturing
// and rendering once at the initialization step.
// Note: audio recording and playing is enabled by default.
mRtcEngine.enableVideo();
// Please go to this page for detailed explanation
// https://docs.agora.io/en/Video/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_rtc_engine.html#af5f4de754e2c1f493096641c5c5c1d8f
mRtcEngine.setVideoEncoderConfiguration(new VideoEncoderConfiguration(
VideoEncoderConfiguration.VD_640x360,
VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_15,
VideoEncoderConfiguration.STANDARD_BITRATE,
VideoEncoderConfiguration.ORIENTATION_MODE.ORIENTATION_MODE_FIXED_PORTRAIT));
}
private void setupLocalVideo() {
// This is used to set a local preview.
// The steps setting local and remote view are very similar.
// But note that if the local user do not have a uid or do
// not care what the uid is, he can set his uid as ZERO.
// Our server will assign one and return the uid via the event
// handler callback function (onJoinChannelSuccess) after
// joining the channel successfully.
mLocalView = RtcEngine.CreateRendererView(getBaseContext());
mLocalView.setZOrderMediaOverlay(true);
mLocalContainer.addView(mLocalView);
mRtcEngine.setupLocalVideo(new VideoCanvas(mLocalView, VideoCanvas.RENDER_MODE_HIDDEN, 0));
}
private void joinChannel() {
// 1. Users can only see each other after they join the
// same channel successfully using the same app id.
// 2. One token is only valid for the channel name that
// you use to generate this token.
String token = "12312323123123wedsa";
mRtcEngine.joinChannel(token, "brolChannelbrobro", "Extra Optional Data", 0);
}
@Override
protected void onDestroy() {
super.onDestroy();
if (!mCallEnd) {
leaveChannel();
}
RtcEngine.destroy();
}
private void leaveChannel() {
mRtcEngine.leaveChannel();
}
public void onLocalAudioMuteClicked(View view) {
mMuted = !mMuted;
mRtcEngine.muteLocalAudioStream(mMuted);
int res = mMuted ? R.drawable.btn_mute : R.drawable.btn_unmute;
mMuteBtn.setImageResource(res);
}
public void onSwitchCameraClicked(View view) {
mRtcEngine.switchCamera();
}
public void onCallClicked(View view) {
if (mCallEnd) {
startCall();
mCallEnd = false;
mCallBtn.setImageResource(R.drawable.btn_endcall);
} else {
endCall();
mCallEnd = true;
mCallBtn.setImageResource(R.drawable.btn_startcall);
}
showButtons(!mCallEnd);
}
private void startCall() {
setupLocalVideo();
joinChannel();
}
private void endCall() {
removeLocalVideo();
removeRemoteVideo();
leaveChannel();
}
private void removeLocalVideo() {
if (mLocalView != null) {
mLocalContainer.removeView(mLocalView);
}
mLocalView = null;
}
private void showButtons(boolean show) {
int visibility = show ? View.VISIBLE : View.GONE;
mMuteBtn.setVisibility(visibility);
mSwitchCameraBtn.setVisibility(visibility);
}
}
I had the same issue. In my case it was a layout problem: I wasn't making the local video view gone and the remote video view visible. I don't know if this still helps after all these years.
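For reference, a sketch of what that kind of fix can look like, using the views from the code above (whether this matches your actual layout is an assumption):

private void setupRemoteVideo(int uid) {
    // ... existing code that creates mRemoteView and calls
    // mRtcEngine.setupRemoteVideo(...) ...
    // Make sure the remote container is actually visible; if the layout
    // starts it out as GONE, the remote stream renders nowhere even
    // though onFirstRemoteVideoDecoded fired.
    mRemoteContainer.setVisibility(View.VISIBLE);
}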
In XNA, I want to manage the background sound for every game state. For example, the StartMenu music should play while the state is StartMenu, and the Playing music should play while the state is Playing. I arranged the code as follows; as you can see, I created Play and Stop methods in every sound class, but it only works when exiting to the game.
public abstract class Sound
{
public SoundEffect Item { get; set; }
public Song BgSound { get; set; }
public abstract void LoadContent(ContentManager content);
public abstract void Play();
public abstract void Stop();
}
public class GameSound : Sound
{
public GameSound()
{
BgSound = null;
Item = null;
}
public override void LoadContent(ContentManager content)
{
BgSound = content.Load<Song>("Sounds/BgSound");
MediaPlayer.IsRepeating = true;
}
public override void Play()
{
MediaPlayer.Play(BgSound);
}
public override void Stop()
{
MediaPlayer.Stop();
}
}
public class StartUpSound : Sound
{
public StartUpSound()
{
BgSound = null;
Item = null;
}
public override void LoadContent(ContentManager content)
{
BgSound = content.Load<Song>("Sounds/StartUp");
MediaPlayer.IsRepeating = false;
}
public override void Play()
{
MediaPlayer.Play(BgSound);
}
public override void Stop()
{
MediaPlayer.Stop();
}
}
public class GameBase : Microsoft.Xna.Framework.Game
{
//..
GameSound gameSoundFx = new GameSound();
StartUpSound startUpSoundFx = new StartUpSound();
//..
protected override void LoadContent()
{
spriteBatch = new SpriteBatch(GraphicsDevice);
gameSoundFx.LoadContent(Content);
startUpSoundFx.LoadContent(Content);
}
protected override void Draw(GameTime gameTime)
{
GraphicsDevice.Clear(Color.CornflowerBlue);
spriteBatch.Begin();
if (currentState == GameState.StartMenu)
{
startUpSoundFx.Play();
}
if (currentState == GameState.Playing)
{
startUpSoundFx.Stop();
gameSoundFx.Play();
}
spriteBatch.End();
base.Draw(gameTime);
}
}
EDIT:
I also tried the following, but nothing changed:
if (currentState == GameState.StartMenu)
{
startUpSoundFx.BgSound = Content.Load<Song>("Sounds/StartUp");
MediaPlayer.Play(startUpSoundFx.BgSound);
}
if (currentState == GameState.Playing)
{
MediaPlayer.Stop();
gameSoundFx.BgSound = Content.Load<Song>("Sounds/StartUp");
MediaPlayer.Play(gameSoundFx.BgSound);
}
This has already been answered here:
https://gamedev.stackexchange.com/questions/86038/c-how-to-properly-play-background-songs-in-xna
Please review the answer there; here is the example it includes:
switch (currentGameState)
{
case GameState.MainMenu:
if (musicState == MusicState.Playing && currentGameState != lastGameState)
{
MediaPlayer.Stop();
musicState = MusicState.NotPlaying;
}
if (musicState == MusicState.NotPlaying)
{
MediaPlayer.Play(song_mainTheme);
musicState = MusicState.Playing;
}
break;
case GameState.GamePlaying:
if (musicState == MusicState.Playing && currentGameState != lastGameState)
{
MediaPlayer.Stop();
musicState = MusicState.NotPlaying;
}
if (musicState == MusicState.NotPlaying)
{
MediaPlayer.Play(song_actionTheme);
musicState = MusicState.Playing;
}
break;
}
It shows how extending the states allows for proper use of the MediaPlayer, as stated in the comments on your question.
I'm trying to create a simple app consisting of ten activities. Each activity looks pretty much the same: it has four buttons (in different colors), and when one particular button is clicked it opens the next activity. The onCreate method of every activity has a MediaPlayer that plays the name of that activity. After some time I see in LogCat that the GC freed some objects, and from that point on the activity doesn't play any sound and the buttons are disabled.
Do you have any advice or suggestions on how to resolve that?
Here is code of one Activity:
public class Green extends Activity {
int buttonActive = 0;
int buttonWrong = 0;
MediaPlayer player;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_green);
buttonWrong = 0;
playSound();
Button btn = (Button)findViewById(R.id.button1);
Button btn2 = (Button)findViewById(R.id.button2);
Button btn3 = (Button)findViewById(R.id.button3);
Button btn4 = (Button)findViewById(R.id.button4);
btn.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
if(buttonActive == 1){
playSoundCorrect();
Intent intent = new Intent(v.getContext(),Blue.class);
startActivity(intent);
}
}
});
btn2.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if(buttonActive == 1 && buttonWrong == 0){
buttonWrong = 1;
playSoundWrong();
}
}
});
btn3.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if(buttonActive == 1 && buttonWrong == 0){
buttonWrong = 1;
playSoundWrong();
}
}
});
btn4.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if(buttonActive == 1 && buttonWrong == 0){
buttonWrong = 1;
playSoundWrong();
}
}
});
}
@Override
protected void onPause() {
// TODO Auto-generated method stub
super.onPause();
finish();
}
private void playSound(){
MediaPlayer player = MediaPlayer.create(this, R.raw.green);
player.start();
player.setOnCompletionListener(new OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
mp.release();
buttonActive = 1;
}
});
}
private void playSoundWrong(){
player = MediaPlayer.create(this, R.raw.wrong2);
if(!player.isPlaying()){
player.start();
}
player.setOnCompletionListener(new OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
mp.release();
buttonWrong = 0;
}
});
}
private void playSoundCorrect(){
MediaPlayer player = MediaPlayer.create(this, R.raw.correct);
player.start();
player.setOnCompletionListener(new OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
mp.release();
}
});
}
}
It looks like in private void playSound you're shadowing the class's player field. If you wrote player = MediaPlayer.create(...), the field would be set to the new MediaPlayer, and the player would stay reachable for as long as the activity instance is reachable. Instead you're writing MediaPlayer player = MediaPlayer.create(...), which declares a new local player variable scoped to the playSound method; once the method returns, that player is no longer strongly referenced and can be garbage-collected mid-playback, even if the activity is still alive. The same applies to playSoundCorrect.
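A minimal sketch of the fix, assigning to the field declared at the top of the class instead of declaring a new local:

private void playSound() {
    // Assign to the existing field so the MediaPlayer stays strongly
    // reachable for as long as the activity does.
    player = MediaPlayer.create(this, R.raw.green);
    player.start();
    player.setOnCompletionListener(new OnCompletionListener() {
        @Override
        public void onCompletion(MediaPlayer mp) {
            mp.release();
            player = null; // drop the reference once playback is done
            buttonActive = 1;
        }
    });
}

playSoundCorrect needs the same change.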