Making a VR video player using Google Cardboard [closed] - google-cardboard

Closed. This question needs to be more focused. It is not currently accepting answers.
Closed 7 years ago.
How can I make a simple VR video player with the Google Cardboard Android SDK? I am new to this area and am currently trying to modify a program from GitHub to play video in Cardboard.

I used the Rajawali library along with the Google Cardboard SDK and modified initScene() as follows to get it working.
public class VideoRenderer extends RajawaliCardboardRenderer {

    private Context mContext;
    private MediaPlayer mMediaPlayer;
    private StreamingTexture mVideoTexture;

    public VideoRenderer(Context context) {
        super(context);
        mContext = context;
    }

    @Override
    protected void initScene() {
        // Prepare the video source and loop it.
        mMediaPlayer = MediaPlayer.create(getContext(), R.raw.video);
        mMediaPlayer.setLooping(true);

        // Stream the MediaPlayer output into a texture.
        mVideoTexture = new StreamingTexture("sintelTrailer", mMediaPlayer);
        Material material = new Material();
        material.setColorInfluence(0);
        try {
            material.addTexture(mVideoTexture);
        } catch (ATexture.TextureException e) {
            e.printStackTrace();
        }

        // Map the texture onto an inward-facing sphere around the camera.
        Sphere sphere = new Sphere(50, 64, 32);
        sphere.setScaleX(-1);
        sphere.setMaterial(material);
        getCurrentScene().addChild(sphere);

        getCurrentCamera().setPosition(Vector3.ZERO);
        getCurrentCamera().setFieldOfView(75);

        mMediaPlayer.start();
    }

    @Override
    protected void onRender(long ellapsedRealtime, double deltaTime) {
        super.onRender(ellapsedRealtime, deltaTime);
        // Pull the latest video frame into the texture every frame.
        mVideoTexture.update();
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mMediaPlayer != null)
            mMediaPlayer.pause();
    }

    @Override
    public void onResume() {
        super.onResume();
        if (mMediaPlayer != null)
            mMediaPlayer.start();
    }

    @Override
    public void onRenderSurfaceDestroyed(SurfaceTexture surfaceTexture) {
        super.onRenderSurfaceDestroyed(surfaceTexture);
        mMediaPlayer.stop();
        mMediaPlayer.release();
    }
}
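For completeness, here is a minimal sketch of how such a renderer might be hooked up to an activity. It assumes the companion RajawaliCardboardActivity base class and its setRenderer() method from the same GitHub integration the renderer above was adapted from; those names are an assumption, so verify them against the version of the project you are using.

// Sketch only: RajawaliCardboardActivity and setRenderer() are assumed to exist
// in the Rajawali/Cardboard integration referenced above.
public class VideoActivity extends RajawaliCardboardActivity {
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Attach the video renderer so each eye renders the textured sphere.
        VideoRenderer renderer = new VideoRenderer(this);
        setRenderer(renderer);
    }
}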

Related

My app crashes when users try to log in (Firebase Realtime Database) [duplicate]

This question already has answers here:
Validate username and email crashed and cannot insert to firebase database
(2 answers)
Cannot add data to different nodes in firebase
(2 answers)
Closed 9 months ago.
I am using the Firebase Realtime Database. When a user tries to log in to an account, the app throws an error and crashes. The code and a screenshot of the logcat are attached below.
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_login);

    final EditText et_correo = findViewById(R.id.et_correo);
    final EditText et_contrasena = findViewById(R.id.et_contrasena);
    final Button btn_ingresar = findViewById(R.id.btn_ingresar);
    final TextView btn_registrate = findViewById(R.id.btn_registrate);

    btn_ingresar.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            final String correotxt = et_correo.getText().toString();
            final String contrasenaTxt = et_contrasena.getText().toString();
            if (correotxt.isEmpty() || contrasenaTxt.isEmpty()) {
                // "Please enter your details"
                Toast.makeText(LoginActivity.this, "Favor de introducir sus datos", Toast.LENGTH_SHORT).show();
            } else {
                databaseReference.child("nombre").addListenerForSingleValueEvent(new ValueEventListener() {
                    @Override
                    public void onDataChange(@NonNull DataSnapshot snapshot) {
                        // THIS IS MY ERROR
                        if (snapshot.hasChild(correotxt)) {
                            final String getPassword = snapshot.child(correotxt).child("contrasena").getValue(String.class);
                            if (getPassword.equals(contrasenaTxt)) {
                                // "You have logged in successfully!"
                                Toast.makeText(LoginActivity.this, "Se ha logueado de manera exitosa!", Toast.LENGTH_SHORT).show();
                                startActivity(new Intent(LoginActivity.this, MainActivity.class));
                                finish();
                            } else {
                                // "Incorrect email or password"
                                Toast.makeText(LoginActivity.this, "Correo o Contraseña incorrectos", Toast.LENGTH_SHORT).show();
                            }
                        }
                    }

                    @Override
                    public void onCancelled(@NonNull DatabaseError error) {
                    }
                });

                // Note: this listener is registered inside the login click handler,
                // so the "register" button only responds after a login attempt.
                btn_registrate.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        // open RegistroActivity
                        startActivity(new Intent(LoginActivity.this, RegistroActivity.class));
                    }
                });
            }
        }
    });
}
// closing brace of the LoginActivity class, whose declaration was not included in the post
}
[Image: logcat output]
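Without the logcat it is hard to be certain, but a NullPointerException on getPassword.equals(contrasenaTxt) is a common cause of this kind of crash: it occurs when the matching child exists but has no "contrasena" value, or when databaseReference was never initialized. A minimal defensive sketch, under those assumptions only:

// Assumes databaseReference was initialized earlier, e.g.:
// databaseReference = FirebaseDatabase.getInstance().getReference();
String getPassword = snapshot.child(correotxt).child("contrasena").getValue(String.class);
if (getPassword != null && getPassword.equals(contrasenaTxt)) {
    startActivity(new Intent(LoginActivity.this, MainActivity.class));
    finish();
} else {
    // Covers both a missing password node and a wrong password.
    Toast.makeText(LoginActivity.this, "Correo o Contraseña incorrectos", Toast.LENGTH_SHORT).show();
}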

How to add Search view for Firebase realtime database? [closed]

Closed. This question needs to be more focused. It is not currently accepting answers.
Closed 5 years ago.
I am developing an Android app with the Firebase Realtime Database. I have created a RecyclerView with CardView and want to add a SearchView to the toolbar that searches all the data. How can I do this?
This is my adapter:
public class RecycleAdepter extends RecyclerView.Adapter<RecycleAdepter.MyViewHolder> {

    ArrayList<Blog> arrayList = new ArrayList<>();

    RecycleAdepter(ArrayList<Blog> arrayList) {
        this.arrayList = arrayList;
    }

    @Override
    public MyViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.bank_row, parent, false);
        return new MyViewHolder(view);
    }

    @Override
    public void onBindViewHolder(MyViewHolder holder, int position) {
        holder.Title.setText(arrayList.get(position).getTitle());
    }

    @Override
    public int getItemCount() {
        return arrayList.size();
    }

    public static class MyViewHolder extends RecyclerView.ViewHolder {
        TextView Title;

        public MyViewHolder(View itemView) {
            super(itemView);
            Title = (TextView) itemView.findViewById(R.id.Idiom_title);
        }
    }

    public void setFilter(ArrayList<Blog> newList) {
        // The body was cut off in the original post; a typical implementation
        // replaces the backing list and refreshes the view:
        arrayList = new ArrayList<>(newList);
        notifyDataSetChanged();
    }
}
This is my search code in the main activity:
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.menu_search, menu);
    MenuItem menuItem = menu.findItem(R.id.searchbar);
    SearchView searchView = (SearchView) MenuItemCompat.getActionView(menuItem);
    searchView.setOnQueryTextListener(this);
    return true;
}

@Override
public boolean onQueryTextSubmit(String query) {
    return false;
}

@Override
public boolean onQueryTextChange(String newText) {
    return false;
}
From what I understand, you have already downloaded all the data into a RecyclerView, so you just need to implement a SearchView and filter the data already held by the RecyclerView. This has already been explained here: How to filter a RecyclerView with a SearchView
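Putting the two pieces together, here is a minimal sketch of the wiring. It assumes mAdapter is the RecycleAdepter instance and fullList is an ArrayList<Blog> holding everything already loaded from Firebase; both names are illustrative, not from the original post.

@Override
public boolean onQueryTextChange(String newText) {
    String query = newText.toLowerCase();
    ArrayList<Blog> filtered = new ArrayList<>();
    // Keep only the items whose title contains the query text.
    for (Blog blog : fullList) {
        String title = blog.getTitle();
        if (title != null && title.toLowerCase().contains(query)) {
            filtered.add(blog);
        }
    }
    mAdapter.setFilter(filtered);
    return true;
}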

Recognizing multiple keyphrases in CMUSphinx for Android [duplicate]

I've installed the PocketSphinx demo and it works fine under Ubuntu and Eclipse, but despite trying I can't work out how I would add recognition of multiple words.
All I want is for the code to recognize single words, which I can then switch() within the code, e.g. "up", "down", "left", "right". I don't want to recognize sentences, just single words.
Any help on this would be greatly appreciated. I have seen other users having similar problems, but nobody seems to know the answer so far.
One thing which is baffling me is why do we need to use the "wakeup" constant at all?
private static final String KWS_SEARCH = "wakeup";
private static final String KEYPHRASE = "oh mighty computer";
.
.
.
recognizer.addKeyphraseSearch(KWS_SEARCH, KEYPHRASE);
What has wakeup got to do with anything?
I have made some progress (?) : Using addGrammarSearch I am able to use a .gram file to list my words, e.g. up,down,left,right,forwards,backwards, which seems to work well if all I say are those particular words. However, any other words will cause the system to match what is said to the "nearest" word from those stated. Ideally I don't want recognition to occur if words spoken are not in the .gram file...
Thanks to Nikolay's tip (see his answer above), I have developed the following code which works fine, and does not recognize words unless they're on the list. You can copy and paste this directly over the main class in the PocketSphinxDemo code:
public class PocketSphinxActivity extends Activity implements RecognitionListener {

    private static final String DIGITS_SEARCH = "digits";
    private SpeechRecognizer recognizer;

    @Override
    public void onCreate(Bundle state) {
        super.onCreate(state);
        setContentView(R.layout.main);
        ((TextView) findViewById(R.id.caption_text)).setText("Preparing the recognizer");
        try {
            Assets assets = new Assets(PocketSphinxActivity.this);
            File assetDir = assets.syncAssets();
            setupRecognizer(assetDir);
        } catch (IOException e) {
            // oops
        }
        ((TextView) findViewById(R.id.caption_text)).setText("Say up, down, left, right, forwards, backwards");
        reset();
    }

    @Override
    public void onPartialResult(Hypothesis hypothesis) {
    }

    @Override
    public void onResult(Hypothesis hypothesis) {
        ((TextView) findViewById(R.id.result_text)).setText("");
        if (hypothesis != null) {
            String text = hypothesis.getHypstr();
            makeText(getApplicationContext(), text, Toast.LENGTH_SHORT).show();
        }
    }

    @Override
    public void onBeginningOfSpeech() {
    }

    @Override
    public void onEndOfSpeech() {
        reset();
    }

    private void setupRecognizer(File assetsDir) {
        File modelsDir = new File(assetsDir, "models");
        recognizer = defaultSetup()
                .setAcousticModel(new File(modelsDir, "hmm/en-us-semi"))
                .setDictionary(new File(modelsDir, "dict/cmu07a.dic"))
                .setRawLogDir(assetsDir)
                .setKeywordThreshold(1e-20f)
                .getRecognizer();
        recognizer.addListener(this);

        File digitsGrammar = new File(modelsDir, "grammar/digits.gram");
        recognizer.addKeywordSearch(DIGITS_SEARCH, digitsGrammar);
    }

    private void reset() {
        recognizer.stop();
        recognizer.startListening(DIGITS_SEARCH);
    }
}
Your digits.gram file should be something like:
up /1e-1/
down /1e-1/
left /1e-1/
right /1e-1/
forwards /1e-1/
backwards /1e-1/
You should experiment with the thresholds within the double slashes // for performance, where 1e-1 represents 0.1 (I think). I think the maximum is 1.0.
And it's 5.30pm so I can stop working now. Result.
You can use addKeywordSearch, which takes a file with keyphrases: one phrase per line, with a threshold for each phrase between slashes, for example:
up /1.0/
down /1.0/
left /1.0/
right /1.0/
forwards /1e-1/
The threshold must be selected to avoid false alarms.
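For reference, registering and starting that keyword search looks like this; the search name "keywords" and the keywordsFile variable are illustrative:

// keywordsFile points to the keyphrase file shown above
recognizer.addKeywordSearch("keywords", keywordsFile);
recognizer.startListening("keywords");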
I am working on updating Antinous' amendment to the PocketSphinx demo so that it runs in Android Studio. This is what I have so far:
// Note: change MainActivity to PocketSphinxActivity for demo use...
public class MainActivity extends Activity implements RecognitionListener {

    private static final String DIGITS_SEARCH = "digits";
    private SpeechRecognizer recognizer;

    /* Used to handle permission request */
    private static final int PERMISSIONS_REQUEST_RECORD_AUDIO = 1;

    @Override
    public void onCreate(Bundle state) {
        super.onCreate(state);
        setContentView(R.layout.main);
        ((TextView) findViewById(R.id.caption_text)).setText("Preparing the recognizer");

        // Check if user has given permission to record audio
        int permissionCheck = ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.RECORD_AUDIO);
        if (permissionCheck != PackageManager.PERMISSION_GRANTED) {
            ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO}, PERMISSIONS_REQUEST_RECORD_AUDIO);
            return;
        }

        new AsyncTask<Void, Void, Exception>() {
            @Override
            protected Exception doInBackground(Void... params) {
                try {
                    Assets assets = new Assets(MainActivity.this);
                    File assetDir = assets.syncAssets();
                    setupRecognizer(assetDir);
                } catch (IOException e) {
                    return e;
                }
                return null;
            }

            @Override
            protected void onPostExecute(Exception result) {
                if (result != null) {
                    ((TextView) findViewById(R.id.caption_text)).setText("Failed to init recognizer " + result);
                } else {
                    reset();
                }
            }
        }.execute();

        ((TextView) findViewById(R.id.caption_text)).setText("Say one, two, three, four, five, six...");
    }

    /**
     * In partial result we get quick updates about current hypothesis. In
     * keyword spotting mode we can react here, in other modes we need to wait
     * for final result in onResult.
     */
    @Override
    public void onPartialResult(Hypothesis hypothesis) {
        if (hypothesis == null) {
            return;
        } else if (hypothesis != null) {
            if (recognizer != null) {
                //recognizer.rapidSphinxPartialResult(hypothesis.getHypstr());
                String text = hypothesis.getHypstr();
                if (text.equals(DIGITS_SEARCH)) {
                    recognizer.cancel();
                    performAction();
                    recognizer.startListening(DIGITS_SEARCH);
                } else {
                    //Toast.makeText(getApplicationContext(), "Partial result = " + text, Toast.LENGTH_SHORT).show();
                }
            }
        }
    }

    @Override
    public void onResult(Hypothesis hypothesis) {
        ((TextView) findViewById(R.id.result_text)).setText("");
        if (hypothesis != null) {
            String text = hypothesis.getHypstr();
            makeText(getApplicationContext(), "Hypothesis" + text, Toast.LENGTH_SHORT).show();
        } else if (hypothesis == null) {
            makeText(getApplicationContext(), "hypothesis = null", Toast.LENGTH_SHORT).show();
        }
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        recognizer.cancel();
        recognizer.shutdown();
    }

    @Override
    public void onBeginningOfSpeech() {
    }

    @Override
    public void onEndOfSpeech() {
        reset();
    }

    @Override
    public void onTimeout() {
    }

    private void setupRecognizer(File assetsDir) throws IOException {
        // The recognizer can be configured to perform multiple searches
        // of different kind and switch between them
        recognizer = defaultSetup()
                .setAcousticModel(new File(assetsDir, "en-us-ptm"))
                .setDictionary(new File(assetsDir, "cmudict-en-us.dict"))
                // .setRawLogDir(assetsDir).setKeywordThreshold(1e-20f)
                .getRecognizer();
        recognizer.addListener(this);

        File digitsGrammar = new File(assetsDir, "digits.gram");
        recognizer.addKeywordSearch(DIGITS_SEARCH, digitsGrammar);
    }

    private void reset() {
        recognizer.stop();
        recognizer.startListening(DIGITS_SEARCH);
    }

    @Override
    public void onError(Exception error) {
        ((TextView) findViewById(R.id.caption_text)).setText(error.getMessage());
    }

    public void performAction() {
        // do here whatever you want
        makeText(getApplicationContext(), "performAction done... ", Toast.LENGTH_SHORT).show();
    }
}
Caveat emptor: this is a work in progress. Check back later. Suggestions would be appreciated.
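One gap worth noting in the code above: when the RECORD_AUDIO permission has not yet been granted, onCreate returns after requesting it and the recognizer is never set up. A possible way to handle this (a sketch, not part of the original code) is to re-run the setup once the permission result comes back:

@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == PERMISSIONS_REQUEST_RECORD_AUDIO) {
        if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            // Permission granted: recreate the activity so onCreate runs again
            // and setupRecognizer() actually gets called.
            recreate();
        } else {
            // Without microphone access the recognizer cannot work.
            finish();
        }
    }
}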

Am I doing something wrong when adding an ad unit on Admob

Hello, I am a new developer and I have created my first app in Android Studio.
I used the test ads provided by AdMob to check that my ads worked, and they did. When I finally published my app with MY ad unit ID, for some reason it didn't work. I then read online that it may take some time before ads activate, so I waited until 3 days had passed, and it still didn't work.
Here are the steps I took:
Follow the tutorial made by Admob to implement code for rewarded ads
Add network permissions
Linked my published app to Admob
Made an ad unit on Admob by clicking "ADD AD UNIT"
I was wondering whether I missed a step, but if I did, why would the test ads work and not the real ones?
I would have liked to contact AdMob directly, but they don't seem to have a customer-service email. You are my last hope, please help. Thank you.
Code: MainActivity Class
public class MainActivity extends Activity implements RewardedVideoAdListener {

    public static RewardedVideoAd mAd;
    public static RewardedVideoAd mAd2;
    public static MediaPlayer click;
    public static MediaPlayer unlock;
    public static MediaPlayer thud;
    public static InterstitialAd mInterstitialAd;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        this.requestWindowFeature(getWindow().FEATURE_NO_TITLE);

        DisplayMetrics dm = new DisplayMetrics();
        getWindowManager().getDefaultDisplay().getMetrics(dm);
        Constants.SCREEN_WIDTH = dm.widthPixels;
        Constants.SCREEN_HEIGHT = dm.heightPixels;
        setContentView(new GamePanel(this));

        mAd = MobileAds.getRewardedVideoAdInstance(this);
        mAd.setRewardedVideoAdListener(this);
        mAd2 = MobileAds.getRewardedVideoAdInstance(this);
        mAd2.setRewardedVideoAdListener(this);
        loadAd();

        click = MediaPlayer.create(getApplicationContext(), R.raw.click_sound);
        unlock = MediaPlayer.create(getApplicationContext(), R.raw.unlock_sound);
        thud = MediaPlayer.create(getApplicationContext(), R.raw.thud_sound);

        mInterstitialAd = new InterstitialAd(this);
        mInterstitialAd.setAdUnitId("ca-app-pub-3940256099942544/1033173712");
        mInterstitialAd.loadAd(new AdRequest.Builder().build());
    }

    private void click() {
        click.start();
    }

    private void unlock() {
        unlock.start();
    }

    private void thud() {
        thud.start();
    }

    private void loadAd() {
        if (!mAd.isLoaded()) {
            mAd.loadAd("ca-app-pub-3940256099942544/5224354917", new AdRequest.Builder().build());
        }
        if (!mAd2.isLoaded()) {
            mAd2.loadAd("ca-app-pub-3940256099942544/5224354917", new AdRequest.Builder().build());
        }
    }

    // Required to reward the user.
    @Override
    public void onRewarded(RewardItem reward) {
        if (GamePanel.Ad1 == 1) {
            Toast.makeText(this, "Congrats 30 Survival Points Added!", Toast.LENGTH_SHORT).show();
        }
        if (GamePanel.Ad2 == 1) {
            Toast.makeText(this, "Congrats 100 Survival Points Added!", Toast.LENGTH_SHORT).show();
        }
    }

    // The following listener methods are optional.
    @Override
    public void onRewardedVideoAdLeftApplication() {
    }

    @Override
    public void onRewardedVideoAdClosed() {
        if (GamePanel.Ad1 == 1) {
            GamePanel.HighCoin = GamePanel.HighCoin + 30;
            GamePanel.Ad1 = 0;
            Toast.makeText(this, "Congrats 30 Survival Points Added!", Toast.LENGTH_SHORT).show();
        }
        if (GamePanel.Ad2 == 1) {
            GamePanel.HighCoin = GamePanel.HighCoin + 100;
            Toast.makeText(this, "Congrats 100 Survival Points Added!", Toast.LENGTH_SHORT).show();
        }
    }

    @Override
    public void onRewardedVideoAdFailedToLoad(int errorCode) {
    }

    @Override
    public void onRewardedVideoAdLoaded() {
    }

    @Override
    public void onRewardedVideoAdOpened() {
    }

    @Override
    public void onRewardedVideoStarted() {
    }
}
And to show the ads I used:
MainActivity.mInterstitialAd.show();
if (MainActivity.mAd.isLoaded())
MainActivity.mAd.show();
The ad unit IDs shown above are the test IDs provided by AdMob, which do work; the problem is with the ad unit IDs I create in AdMob myself.
Thanks for the help, but I figured it out: I had forgotten to enter my billing info on AdMob.
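For anyone debugging a similar case before checking billing, logging the rewarded video error code can narrow things down quickly. A small sketch (the error-code constants are from the same Google Mobile Ads SDK generation as RewardedVideoAdListener; the log tag is illustrative):

@Override
public void onRewardedVideoAdFailedToLoad(int errorCode) {
    // 0 = internal error, 1 = invalid request, 2 = network error, 3 = no fill
    Log.e("Ads", "Rewarded video failed to load, errorCode=" + errorCode);
    if (errorCode == AdRequest.ERROR_CODE_NO_FILL) {
        // Newly created ad units often return "no fill" until they start serving.
        Log.e("Ads", "No fill - the ad unit may not be serving yet");
    }
}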

Are distorted graphics caused by AndEngine/GL + Android Nougat, or a bug in the AVD?

I'm getting some buggy graphics behaviour when I run my AndEngine-based app on AVDs with the Android 7.0 (Nougat) image in Android Studio 2.2.2. The result is normal when I use Android 6 (link), but it is translated, enlarged and wrapped in the x-dimension, with distorted colours when I use Android 7 (link).
Has anyone had experience with this type of graphics distortion? Could it be caused by Android 7.0 not being backward-compatible with something in AndEngine/OpenGL, or a problem with the Nougat image in Android Studio's AVD?
The minimal app I used to recreate the behaviour uses AndEngine (GLES2-AnchorCenter) to display a sprite on the main scene. I've tested it with several AVDs and it is consistently buggy with Nougat regardless of the device.
public class MainActivity extends SimpleLayoutGameActivity {

    private static int CAMERA_WIDTH = 480;
    private static int CAMERA_HEIGHT = 800;
    private Scene mScene;
    private ITextureRegion myTexture;

    @Override
    protected int getLayoutID() {
        return R.layout.activity_main;
    }

    @Override
    protected int getRenderSurfaceViewID() {
        return R.id.gameview;
    }

    @Override
    public void onCreate(Bundle pSavedInstanceState) {
        super.onCreate(pSavedInstanceState);
    }

    @Override
    public Engine onCreateEngine(EngineOptions pEngineOptions) {
        return new Engine(pEngineOptions);
    }

    @Override
    public EngineOptions onCreateEngineOptions() {
        ScreenOrientation orientation = ScreenOrientation.PORTRAIT_FIXED;
        EngineOptions en = new EngineOptions(true, orientation,
                new FillResolutionPolicy(), new Camera(0, 0, CAMERA_WIDTH, CAMERA_HEIGHT));
        return en;
    }

    @Override
    public synchronized void onPauseGame() {
        super.onPauseGame();
    }

    @Override
    public synchronized void onResumeGame() {
        super.onResumeGame();
    }

    @Override
    protected void onCreateResources() {
        BitmapTextureAtlasTextureRegionFactory.setAssetBasePath("gfx/");
        final BuildableBitmapTextureAtlas gameTextureAtlasBIPMA2 = new BuildableBitmapTextureAtlas(this.getTextureManager(),
                136, 136, BitmapTextureFormat.RGBA_4444, TextureOptions.BILINEAR_PREMULTIPLYALPHA);
        myTexture = BitmapTextureAtlasTextureRegionFactory.createFromAsset(
                gameTextureAtlasBIPMA2, this.getAssets(), "redbomb.png");
        try {
            gameTextureAtlasBIPMA2.build(new BlackPawnTextureAtlasBuilder<IBitmapTextureAtlasSource, BitmapTextureAtlas>(2, 0, 2));
            gameTextureAtlasBIPMA2.load();
        } catch (Exception e) {
            throw new RuntimeException("Error loading BIPMA2", e);
        }
    }

    @Override
    protected Scene onCreateScene() {
        this.mScene = new Scene();
        // Add a sprite
        float diameter = CAMERA_WIDTH / 1.5f;
        this.mScene.attachChild(new Sprite(CAMERA_WIDTH / 2f, CAMERA_HEIGHT / 2f,
                diameter, myTexture.getHeight() * diameter / myTexture.getWidth(),
                myTexture, getVertexBufferObjectManager()));
        return this.mScene;
    }
}
UPDATE: I have isolated the cause to the use of RGBA_4444 as the texture pixel format; when changed to RGBA_8888 the graphics are normal. Still, if anyone knows how to fix it so that I can use RGBA_4444...
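As a concrete illustration of the workaround described in the update, only the pixel-format argument in onCreateResources changes:

// RGBA_8888 instead of RGBA_4444 avoids the distortion on the Nougat emulator image
final BuildableBitmapTextureAtlas gameTextureAtlasBIPMA2 = new BuildableBitmapTextureAtlas(this.getTextureManager(),
        136, 136, BitmapTextureFormat.RGBA_8888, TextureOptions.BILINEAR_PREMULTIPLYALPHA);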
