Issues with runaway thread - android-studio

I'm pretty new to Android Studio. I noticed that my program has a very severe performance hiccup, and I believe it slows down further every time I run the app. I think I have a runaway thread; I will attach pictures at the end of my post. I could really use some help. The first picture shows an example of the thread, and the second picture shows the same thread after 5 minutes or so of waiting. I have attached two pieces of code: CameraSurfaceView runs the code, while FaceDetectionThread creates the thread.
package com.example.phliip_vision;
import java.util.ArrayList;
import java.util.List;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PointF;
import android.graphics.Rect;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.Size;
import android.media.FaceDetector;
import android.media.FaceDetector.Face;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import com.example.phliip_vision.Point;
import com.example.phliip_vision.MeasurementStepMessage;
import com.example.phliip_vision.MessageHUB;
import com.example.phliip_vision.Util;
public class CameraSurfaceView extends SurfaceView implements Callback,
Camera.PreviewCallback {
public static final int CALIBRATION_DISTANCE_A4_MM = 294;
public static final int CALIBRATION_MEASUREMENTS = 10;
public static final int AVERAGE_THREASHHOLD = 5;
private static final String TAG = "CameraSurfaceView";
/**
* Measured distance at calibration point
*/
private float _distanceAtCalibrationPoint = -1;
private float _currentAvgEyeDistance = -1;
// private int _facesFoundInMeasurement = -1;
/**
* in cm
*/
private float _currentDistanceToFace = -1;
private final SurfaceHolder mHolder;
private Camera mCamera;
private Face _foundFace = null;
private int _threashold = CALIBRATION_MEASUREMENTS;
private FaceDetectionThread _currentFaceDetectionThread;
private List<Point> _points;
protected final Paint _middlePointColor = new Paint();
protected final Paint _eyeColor = new Paint();
private Size _previewSize;
// private boolean _measurementStartet = false;
private boolean _calibrated = false;
private boolean _calibrating = false;
private int _calibrationsLeft = -1;
public CameraSurfaceView(final Context context, final AttributeSet attrs) {
super(context, attrs);
_middlePointColor.setARGB(100, 200, 0, 0);
_middlePointColor.setStyle(Paint.Style.FILL);
_middlePointColor.setStrokeWidth(2);
_eyeColor.setColor(Color.GREEN);
mHolder = getHolder();
mHolder.addCallback(this);
}
public void setCamera(final Camera camera) {
mCamera = camera;
if (mCamera != null) {
requestLayout();
Log.d(TAG, "mCamera RANNNNNNN!!!!");
Camera.Parameters params = mCamera.getParameters();
camera.setDisplayOrientation(90);
List<String> focusModes = params.getSupportedFocusModes();
if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
Log.d(TAG, "FOCUS_MODE_AUTO RANNNNNNN!!!!");
// set the focus mode
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
// set Camera parameters
mCamera.setParameters(params);
}
}
}
/**
* Variables for the onDraw method, in order to prevent variable allocation
* to slow down the sometimes heavily called onDraw method
*/
private final PointF _middlePoint = new PointF();
private final Rect _trackingRectangle = new Rect();
private final static int RECTANGLE_SIZE = 20;
private boolean _showEyes = false;
private boolean _showTracking = true;
@SuppressLint("DrawAllocation")
@Override
protected void onDraw(final Canvas canvas) {
// super.onDraw(canvas);
if (_foundFace != null) {
_foundFace.getMidPoint(_middlePoint);
Log.d(TAG, "_middlePoint RANNNNNNN!!!!");
Log.i("Camera", _middlePoint.x + " : " + _middlePoint.y);
// portrait mode!
float heightRatio = getHeight() / (float) _previewSize.width;
float widthRatio = getWidth() / (float) _previewSize.height;
Log.i("Drawcall", _middlePoint.x + " : " + _middlePoint.y);
int realX = (int) (_middlePoint.x * widthRatio);
int realY = (int) (_middlePoint.y * heightRatio);
Log.i("Drawcall", "Real :" + realX + " : " + realY);
int halfEyeDist = (int) (widthRatio * _foundFace.eyesDistance() / 2);
if (_showTracking) {
// Middle point
Log.d(TAG, "_showTracking RANNNNNNN!!!!");
_trackingRectangle.left = realX - RECTANGLE_SIZE;
_trackingRectangle.top = realY - RECTANGLE_SIZE;
_trackingRectangle.right = realX + RECTANGLE_SIZE;
_trackingRectangle.bottom = realY + RECTANGLE_SIZE;
canvas.drawRect(_trackingRectangle, _middlePointColor);
}
if (_showEyes) {
// Left eye
Log.d(TAG, "_showEyes RANNNNNNN!!!!");
_trackingRectangle.left = realX - halfEyeDist - RECTANGLE_SIZE;
_trackingRectangle.top = realY - RECTANGLE_SIZE;
_trackingRectangle.right = realX - halfEyeDist + RECTANGLE_SIZE;
_trackingRectangle.bottom = realY + RECTANGLE_SIZE;
canvas.drawRect(_trackingRectangle, _eyeColor);
// Right eye
_trackingRectangle.left = realX + halfEyeDist - RECTANGLE_SIZE;
_trackingRectangle.top = realY - RECTANGLE_SIZE;
_trackingRectangle.right = realX + halfEyeDist + RECTANGLE_SIZE;
_trackingRectangle.bottom = realY + RECTANGLE_SIZE;
canvas.drawRect(_trackingRectangle, _eyeColor);
}
}
}
public void reset() {
Log.d(TAG, "reset RANNNNNNN!!!!");
_distanceAtCalibrationPoint = -1;
_currentAvgEyeDistance = -1;
_calibrated = false;
_calibrating = false;
_calibrationsLeft = -1;
}
/**
* Sets the current eye distance to be the distance of a piece of A4 paper,
* e.g. 29.7 cm
*/
public void calibrate() {
Log.d(TAG, "calibrate RANNNNNNN!!!!");
if (!_calibrating || !_calibrated) {
_points = new ArrayList<>();
_calibrating = true;
_calibrationsLeft = CALIBRATION_MEASUREMENTS;
_threashold = CALIBRATION_MEASUREMENTS;
}
}
private void doneCalibrating() {
Log.d(TAG, "doneCalibrating RANNNNNNN!!!!");
_calibrated = true;
_calibrating = false;
_currentFaceDetectionThread = null;
// _measurementStartet = false;
_threashold = AVERAGE_THREASHHOLD;
_distanceAtCalibrationPoint = _currentAvgEyeDistance;
MessageHUB.get().sendMessage(MessageHUB.DONE_CALIBRATION, null);
}
public boolean isCalibrated() {
Log.d(TAG, "isCalibrated RANNNNNNN!!!!");
return _calibrated || _calibrating;
}
public void showMiddleEye(final boolean on) {
Log.d(TAG, "showMiddleEye RANNNNNNN!!!!");
_showTracking = on;
}
public void showEyePoints(final boolean on) {
Log.d(TAG, "showEyePoints RANNNNNNN!!!!");
_showEyes = on;
}
private void updateMeasurement(final FaceDetector.Face currentFace) {
if (currentFace == null) {
Log.d(TAG, "updateMeasurement RANNNNNNN!!!!");
// _facesFoundInMeasurement--;
return;
}
_foundFace = _currentFaceDetectionThread.getCurrentFace();
_points.add(new Point(_foundFace.eyesDistance(),
CALIBRATION_DISTANCE_A4_MM
* (_distanceAtCalibrationPoint / _foundFace
.eyesDistance())));
while (_points.size() > _threashold) {
_points.remove(0);
Log.d(TAG, "Removing points RANNNNNNN!!!!");
}
float sum = 0;
for (Point p : _points) {
sum += p.getEyeDistance();
Log.d(TAG, "adding points RANNNNNNN!!!!");
}
_currentAvgEyeDistance = sum / _points.size();
_currentDistanceToFace = CALIBRATION_DISTANCE_A4_MM
* (_distanceAtCalibrationPoint / _currentAvgEyeDistance);
_currentDistanceToFace = Util.MM_TO_CM(_currentDistanceToFace);
MeasurementStepMessage message = new MeasurementStepMessage();
message.setConfidence(currentFace.confidence());
message.setCurrentAvgEyeDistance(_currentAvgEyeDistance);
message.setDistToFace(_currentDistanceToFace);
message.setEyesDistance(currentFace.eyesDistance());
message.setMeasurementsLeft(_calibrationsLeft);
message.setProcessTimeForLastFrame(_processTimeForLastFrame);
MessageHUB.get().sendMessage(MessageHUB.MEASUREMENT_STEP, message);
}
private long _lastFrameStart = System.currentTimeMillis();
private float _processTimeForLastFrame = -1;
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
Log.d(TAG, "onPreviewFrame RANNNNNNN!!!!" + _calibrationsLeft);
if (_calibrationsLeft == -1)
return;
if (_calibrationsLeft > 0) {
// Doing calibration !
Log.d(TAG, "_calibrationLeft RANNNNNNN!!!!" + _calibrationsLeft);
if (_currentFaceDetectionThread != null
&& _currentFaceDetectionThread.isAlive()) {
Log.d(TAG, "_currentFaceDectectionThread RANNNNNNN!!!!" + _currentFaceDetectionThread);
// Drop Frame
return;
}
// No face detection started or already finished
_processTimeForLastFrame = System.currentTimeMillis()
- _lastFrameStart;
_lastFrameStart = System.currentTimeMillis();
if (_currentFaceDetectionThread != null) {
Log.d(TAG, "_calibrationLeft-- RANNNNNNN!!!!");
_calibrationsLeft--;
updateMeasurement(_currentFaceDetectionThread.getCurrentFace());
if (_calibrationsLeft == 0) {
Log.d(TAG, "Calibrating done RANNNNNNN!!!!");
doneCalibrating();
invalidate();
return;
}
}
_currentFaceDetectionThread = new FaceDetectionThread(data,
_previewSize);
_currentFaceDetectionThread.start();
invalidate();
} else {
// Simple Measurement
if (_currentFaceDetectionThread != null
&& _currentFaceDetectionThread.isAlive()) {
Log.d(TAG, "Dropping frames RANNNNNNN!!!!");
// Drop Frame
return;
}
// No face detection started or already finished
_processTimeForLastFrame = System.currentTimeMillis()
- _lastFrameStart;
_lastFrameStart = System.currentTimeMillis();
if (_currentFaceDetectionThread != null)
updateMeasurement(_currentFaceDetectionThread.getCurrentFace());
Log.d(TAG, "Updating measurements RANNNNNNN!!!!");
_currentFaceDetectionThread = new FaceDetectionThread(data,
_previewSize);
_currentFaceDetectionThread.start();
Log.d(TAG, "invalidate RANNNNNNN!!!!");
invalidate();
}
}
/*
* SURFACE METHODS, TO CREATE AND RELEASE SURFACE THE CORRECT WAY.
*
* @see
* android.view.SurfaceHolder.Callback#surfaceCreated(android.view.SurfaceHolder
* )
*/
@Override
public void surfaceCreated(final SurfaceHolder holder) {
synchronized (this) {
// This allows us to make our own drawBitmap
this.setWillNotDraw(false);
}
}
@Override
public void surfaceDestroyed(final SurfaceHolder holder) {
mCamera.release();
mCamera = null;
}
@Override
public void surfaceChanged(final SurfaceHolder holder, final int format,
final int width, final int height) {
if (mHolder.getSurface() == null) {
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e) {
// ignore: tried to stop a non-existent preview
}
Parameters parameters = mCamera.getParameters();
_previewSize = parameters.getPreviewSize();
// mCamera.setDisplayOrientation(90);
// mCamera.setParameters(parameters);
// start preview with new settings
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
mCamera.setPreviewCallback(this);
} catch (Exception e) {
Log.d("This", "Error starting camera preview: " + e.getMessage());
}
}
}
Here is the other code.
package com.example.phliip_vision;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera.Size;
import android.media.FaceDetector;
import android.media.FaceDetector.Face;
import android.util.Log;
public class FaceDetectionThread extends Thread {
public static final String FACEDETECTIONTHREAD_TAG = "FaceDetectionThread_Tag";
private static final String TAG = "FaceDetectionThread";
private Face _currentFace;
private final byte[] _data;
private final Size _previewSize;
private Bitmap _currentFrame;
public FaceDetectionThread(final byte[] data, final Size previewSize) {
Log.d(TAG, "What are we waiting on in FaceDetectionThread????");
_data = data;
_previewSize = previewSize;
}
public Face getCurrentFace() {
Log.d(TAG, "What are we waiting on in Current faces????");
return _currentFace;
}
public Bitmap getCurrentFrame() {
return _currentFrame;
}
/**
* Decodes the NV21 preview frame to a Bitmap, rotates it to portrait and runs face detection on it.
*/
@Override
public void run() {
long t = System.currentTimeMillis();
YuvImage yuvimage = new YuvImage(_data, ImageFormat.NV21,
_previewSize.width, _previewSize.height, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
if (!yuvimage.compressToJpeg(new Rect(0, 0, _previewSize.width,
_previewSize.height), 100, baos)) {
Log.e("Camera", "compressToJpeg failed");
}
Log.i("Timing", "Compression finished: "
+ (System.currentTimeMillis() - t));
t = System.currentTimeMillis();
BitmapFactory.Options bfo = new BitmapFactory.Options();
bfo.inPreferredConfig = Bitmap.Config.RGB_565;
_currentFrame = BitmapFactory.decodeStream(new ByteArrayInputStream(
baos.toByteArray()), null, bfo);
Log.i("Timing", "Decode Finished: " + (System.currentTimeMillis() - t));
t = System.currentTimeMillis();
// Rotate the frame so it suits our portrait mode
Matrix matrix = new Matrix();
matrix.postRotate(90);
matrix.preScale(-1, 1);
// We rotate the same Bitmap
_currentFrame = Bitmap.createBitmap(_currentFrame, 0, 0,
_previewSize.width, _previewSize.height, matrix, false);
Log.i("Timing",
"Rotate, Create finished: " + (System.currentTimeMillis() - t));
t = System.currentTimeMillis();
if (_currentFrame == null) {
Log.e(FACEDETECTIONTHREAD_TAG, "Could not decode Image");
return;
}
FaceDetector d = new FaceDetector(_currentFrame.getWidth(),
_currentFrame.getHeight(), 1);
Face[] faces = new Face[1];
d.findFaces(_currentFrame, faces);
Log.i("Timing",
"FaceDetection finished: " + (System.currentTimeMillis() - t));
t = System.currentTimeMillis();
_currentFace = faces[0];
Log.d(FACEDETECTIONTHREAD_TAG, "Found: " + faces[0] + " Faces");
}
}
[Screenshot 1: the thread shortly after launching the app]
[Screenshot 2: the same thread after about 5 minutes of waiting]


NullPointerException not in my code but in onResume() for LibGDX AndroidInput

This is the stack trace:
E/AndroidRuntime: FATAL EXCEPTION: main
Process: com.epl.game, PID: 18789
java.lang.RuntimeException: Unable to resume activity {com.epl.game/com.epl.game.AndroidLauncher}: java.lang.NullPointerException: Attempt to invoke virtual method 'void com.badlogic.gdx.backends.android.AndroidInput.onResume()' on a null object reference
at android.app.ActivityThread.performResumeActivity(ActivityThread.java:4205)
at android.app.ActivityThread.handleResumeActivity(ActivityThread.java:4237)
at android.app.servertransaction.ResumeActivityItem.execute(ResumeActivityItem.java:52)
at android.app.servertransaction.TransactionExecutor.executeLifecycleState(TransactionExecutor.java:176)
at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:97)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:2016)
at android.os.Handler.dispatchMessage(Handler.java:107)
at android.os.Looper.loop(Looper.java:214)
at android.app.ActivityThread.main(ActivityThread.java:7356)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:492)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:930)
Caused by: java.lang.NullPointerException: Attempt to invoke virtual method 'void com.badlogic.gdx.backends.android.AndroidInput.onResume()' on a null object reference
at com.badlogic.gdx.backends.android.AndroidApplication.onResume(AndroidApplication.java:300)
at android.app.Instrumentation.callActivityOnResume(Instrumentation.java:1453)
at android.app.Activity.performResume(Activity.java:7962)
at android.app.ActivityThread.performResumeActivity(ActivityThread.java:4195)
at android.app.ActivityThread.handleResumeActivity(ActivityThread.java:4237) 
at android.app.servertransaction.ResumeActivityItem.execute(ResumeActivityItem.java:52) 
at android.app.servertransaction.TransactionExecutor.executeLifecycleState(TransactionExecutor.java:176) 
at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:97) 
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:2016) 
at android.os.Handler.dispatchMessage(Handler.java:107) 
at android.os.Looper.loop(Looper.java:214) 
at android.app.ActivityThread.main(ActivityThread.java:7356) 
at java.lang.reflect.Method.invoke(Native Method) 
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:49 
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:930) 
This is my main project
package com.epl.game;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.math.Intersector;
import com.badlogic.gdx.math.Rectangle;
import org.omg.PortableServer.POAManagerPackage.State;
import java.util.ArrayList;
import java.util.Random;
public class epl extends ApplicationAdapter {
MyTextInputListener listener = new MyTextInputListener();
SpriteBatch batch;
Texture background;
Texture[] man;
State[] gsm;
int batsmanState = 0;
int pause = 0;
float gravity = 0.2f;
float velocity = 0;
int manY = 0;
Rectangle manRectangle;
BitmapFont font1;
BitmapFont font2;
BitmapFont font3;
Texture dizzy;
int score = 0;
int gameState = 0;
int i1 = 0;
int i2 = 0;
State state0;
State state1;
State state2;
State state3;
State state4;
State state5;
Random random;
String humanName;
ArrayList<Integer> coinXs = new ArrayList<>();
ArrayList<Integer> coinYs = new ArrayList<>();
ArrayList<Rectangle> coinRectangles = new ArrayList<>();
Texture coin;
int coinCount;
ArrayList<Integer> bombXs = new ArrayList<>();
ArrayList<Integer> bombYs = new ArrayList<>();
ArrayList<Rectangle> bombRectangles = new ArrayList<>();
Texture bomb;
int bombCount;
PlayServices ply;
@Override
public void create() {
batch = new SpriteBatch();
background = new Texture("bg.png");
man = new Texture[4];
man[0] = new Texture("batsman.jpg");
man[1] = new Texture("batsman.jpg");
man[2] = new Texture("batsman.jpg");
man[3] = new Texture("batsman.jpg");
gsm = new State[6];
gsm[0] = (state0);
gsm[1] = (state1);
gsm[2] = (state2);
gsm[3] = (state3);
gsm[4] = (state4);
gsm[5] = (state5);
manY = Gdx.graphics.getHeight() / 2;
coin = new Texture("ball.png");
bomb = new Texture("stump.jpeg");
random = new Random();
dizzy = new Texture("out.jpeg");
font1 = new BitmapFont();
font1.setColor(Color.RED);
font1.getData().setScale(10);
font2 = new BitmapFont();
font2.setColor(Color.RED);
font2.getData().setScale(10);
font3 = new BitmapFont();
font3.setColor(Color.RED);
font3.getData().setScale(10);
}
public void makeCoin() {
float height = random.nextFloat() * Gdx.graphics.getHeight();
coinYs.add((int) height);
coinXs.add(Gdx.graphics.getWidth());
}
public void makeBomb() {
float height = random.nextFloat() * Gdx.graphics.getHeight();
bombYs.add((int)height);
bombXs.add(Gdx.graphics.getWidth());
}
private String myText;
public class MyTextInputListener implements Input.TextInputListener {
@Override
public void input(String text) {
}
@Override
public void canceled() {
whatIsYourName();
}
public String getText() {
return myText;
}
public void whatIsYourName() {
Gdx.input.getTextInput(listener, "Name : ", "", "eg:Jonathan");
humanName = listener.getText();
gameState = 1;
}
}
public epl(PlayServices ply){
this.ply = ply;
}
@Override
public void render () {
batch.begin();
batch.draw(background, 0, 0, Gdx.graphics.getWidth(),
Gdx.graphics.getHeight());
if (gameState == 1 && state1 == null) {
// GAME IS LIVE
// BOMB
if (bombCount < 250) {
bombCount++;
} else {
bombCount = 0;
makeBomb();
}
bombRectangles.clear();
for (int i = 0; i < bombXs.size(); i++) {
batch.draw(bomb, bombXs.get(i), bombYs.get(i));
bombXs.set(i, bombXs.get(i) - 8);
bombRectangles.add(new Rectangle(bombXs.get(i),
bombYs.get(i),
bomb.getWidth(), bomb.getHeight()));
}
// COINS
if (coinCount < 100) {
coinCount++;
} else {
coinCount = 0;
makeCoin();
}
coinRectangles.clear();
for (int i = 0; i < coinXs.size(); i++) {
batch.draw(coin, coinXs.get(i), coinYs.get(i));
coinXs.set(i, coinXs.get(i) - 4);
coinRectangles.add(new Rectangle(coinXs.get(i),
coinYs.get(i),
coin.getWidth(), coin.getHeight()));
}
if (Gdx.input.justTouched()) {
velocity = -10;
}
if (pause < 8) {
pause++;
} else {
pause = 0;
if (batsmanState < 3) {
batsmanState++;
} else {
batsmanState = 0;
}
}
velocity += gravity;
manY -= velocity;
if (manY <= 0) {
manY = 0;
}
} else if (gameState == 5 && state5 == null) {
//leaderboard
if (Gdx.input.justTouched()){
ply.submitScore(humanName,score);
ply.showScore(humanName);
gameState = 1;
}
}else if (gameState == 3 && state3 == null) {
//name
listener.whatIsYourName();
gameState = 1;
} else if (gameState == 0 && state0 == null) {
// Waiting to start
if (humanName == null){
gameState = 3;
}else{
gameState = 1;
}
} else if (gameState == 4 && state4 == null) {
//final score display
font3.draw(batch, "Score = " + score,100,1400);
if (Gdx.input.justTouched()){
score = 0;
gameState = 1;
}
}else if (gameState == 2 && state2 == null) {
// GAME OVER
if (Gdx.input.justTouched()) {
manY = Gdx.graphics.getHeight() / 2;
velocity = 0;
coinXs.clear();
coinYs.clear();
coinRectangles.clear();
coinCount = 0;
bombXs.clear();
bombYs.clear();
bombRectangles.clear();
bombCount = 0;
i1 = 0;
i2 = 0;
}
}
if (gameState == 2) {
batch.draw(dizzy, Gdx.graphics.getWidth() / 2 -
man[batsmanState].getWidth() / 2, manY);
if (Gdx.input.justTouched()){
gameState = 4;
}
} else {
batch.draw(man[batsmanState], Gdx.graphics.getWidth() / 2 -
man[batsmanState].getWidth() / 2, manY);
}
manRectangle = new Rectangle(Gdx.graphics.getWidth() / 2 -
man[batsmanState].getWidth() / 2, manY,
man[batsmanState].getWidth(), man[batsmanState].getHeight());
for (int i=0; i < coinRectangles.size();i++) {
if (Intersector.overlaps(manRectangle, coinRectangles.get(i))) {
score++;
i1 = random.nextInt((4 -1) + 1);
score = score + i1;
i2 = i1 + 1;
coinRectangles.remove(i);
coinXs.remove(i);
coinYs.remove(i);
break;
}
}
for (int i=0; i < bombRectangles.size();i++) {
if (Intersector.overlaps(manRectangle, bombRectangles.get(i))) {
gameState = 2;
}
}
font1.draw(batch, String.valueOf(score),100,200);
font2.draw(batch, String.valueOf(i2),900,200);
batch.end();
}
@Override
public void dispose () {
batch.dispose();
}
}
This is my android launcher
package com.epl.game;
import android.content.Intent;
import android.os.Bundle;
import com.badlogic.gdx.backends.android.AndroidApplication;
import com.google.android.gms.games.Games;
import com.google.example.games.basegameutils.GameHelper;
public class AndroidLauncher extends AndroidApplication implements PlayServices {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
gameHelper = new GameHelper(this, GameHelper.CLIENT_GAMES);
gameHelper.enableDebugLog(true);
GameHelper.GameHelperListener gameHelperListener = new GameHelper.GameHelperListener() {
@Override
public void onSignInFailed() {
}
@Override
public void onSignInSucceeded() {
}
};
gameHelper.setup(gameHelperListener);
}
String leaderboard = "CgkI7PuNlqsVEAIQAA";
private GameHelper gameHelper;
@Override
protected void onStart() {
super.onStart();
gameHelper.onStart(this); // You will be logged in to google play services as soon as you open app , i,e on start
}
@Override
protected void onStop() {
super.onStop();
gameHelper.onStop();
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
gameHelper.onActivityResult(requestCode, resultCode, data);
}
@Override
public boolean signIn() {
try {
runOnUiThread(new Runnable() {
@Override
public void run() {
gameHelper.beginUserInitiatedSignIn();
}
});
} catch (Exception e) {
}
return true;
}
@Override
public void submitScore(String LeaderBoard, int highScore) {
if (isSignedIn()) {
Games.Leaderboards.submitScore(gameHelper.getApiClient(), LeaderBoard, highScore);
} else {
System.out.println(" Not signin Yet ");
}
}
@Override
public void showScore(String leaderboard) {
if (isSignedIn()) {
startActivityForResult(Games.Leaderboards.getLeaderboardIntent(gameHelper.getApiClient(), leaderboard), 1);
} else {
signIn();
}
}
@Override
public boolean isSignedIn() {
return false;
}
}
And this is my play services interface
package com.epl.game;
public interface PlayServices
{
boolean signIn();
void submitScore(String LeaderBoard, int highScore);
void showScore(String LeaderBoard);
boolean isSignedIn();
}
I am new to libGDX and I am trying to create a game with a leaderboard. I created this by importing BaseGameUtils. If you have another way I could create a global leaderboard in my game, please let me know.
It is critical that you call the initialize method in onCreate of your AndroidLauncher class. This is what sets up LibGDX's backends for graphics, sound, and input. Since you did not call initialize, the input class (along with graphics, sound, files, etc.) was not set up and assigned, and so is still null when the resume part of the lifecycle is reached. This leads to the NullPointerException.
In your case, your onCreate method should look something like:
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
AndroidApplicationConfiguration config = new AndroidApplicationConfiguration();
// customize the configuration here
// epl's only constructor takes a PlayServices, and AndroidLauncher implements it
initialize(new epl(this), config);
// Your other setup code (GameHelper etc.)...
}
Note, class names in Java should always start with a capital letter. It will make it easier to read and understand your code.
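As a purely cosmetic illustration of that convention (Epl here is just the renamed class; nothing else about the body changes):
public class Epl extends ApplicationAdapter {
// same body as the epl class shown in the question
}
The launcher would then call initialize(new Epl(this), config) accordingly.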

RTMP live transcription

I want to transcribe a live RTMP stream using Google Speech-to-Text.
In the Google sample code the microphone is the input source of the audio stream, but here I want to use the RTMP stream instead of the mic.
I am reading byte arrays using Xuggler and storing them in sharedQueue.
But my code is failing with the exception below:
io.grpc.StatusRuntimeException: CANCELLED: Failed to read message.
public class DataLoader2 implements Runnable {
static ArrayList<Byte> data = new ArrayList<Byte>();
static byte[] audioChunk = new byte[1150];
static ByteBuffer buff;
private static void extractAudio(String rtmpSourceUrl) {
IMediaReader mediaReader = ToolFactory.makeReader(rtmpSourceUrl);
mediaReader.addListener(new MediaToolAdapter() {
private IContainer container;
@Override
public void onReadPacket(IReadPacketEvent event) {
event.getPacket().getByteBuffer().get(audioChunk);
try {
SpeechToText.sharedQueue.put(audioChunk);
} catch (InterruptedException e) {
}
}
@Override
public void onOpenCoder(IOpenCoderEvent event) {
buff = ByteBuffer.wrap(audioChunk);
container = event.getSource().getContainer();
}
@Override
public void onAudioSamples(IAudioSamplesEvent event) {
/*
* if (DataLoader2.data.size() < 6400) {
* DataLoader2.data.add(event.getMediaData().getByteBuffer().get()); } else {
*
* for (byte audio : DataLoader2.data) { buff.put(audio); }
*
* byte[] combined = buff.array();
*
* try { SpeechToText.sharedQueue.put(combined); } catch (InterruptedException
* e) { e.printStackTrace(); }
*
* DataLoader2.data.clear(); buff.clear(); buff = ByteBuffer.wrap(audioChunk);
*
* }
*/
// System.out.println("Event:" + event.getMediaData().getByteBuffer().get());
// SpeechToText.sharedQueue.put(event.getMediaData().getByteBuffer().get());
}
@Override
public void onClose(ICloseEvent event) {
}
});
while (mediaReader.readPacket() == null) {
}
}
@Override
public void run() {
String rtmpSourceUrl = "rtmp://localhost:1935/livewowza/xyz";
extractAudio(rtmpSourceUrl);
}
}
public class SpeechToText {
private static final int STREAMING_LIMIT = 10000; // 10 seconds
public static final String RED = "\033[0;31m";
public static final String GREEN = "\033[0;32m";
public static final String YELLOW = "\033[0;33m";
// Creating shared object
public static volatile BlockingQueue<byte[]> sharedQueue = new LinkedBlockingQueue();
private static TargetDataLine targetDataLine;
private static int BYTES_PER_BUFFER = 6400; // buffer size in bytes
private static int restartCounter = 0;
private static ArrayList<ByteString> audioInput = new ArrayList<ByteString>();
private static ArrayList<ByteString> lastAudioInput = new ArrayList<ByteString>();
private static int resultEndTimeInMS = 0;
private static int isFinalEndTime = 0;
private static int finalRequestEndTime = 0;
private static boolean newStream = true;
private static double bridgingOffset = 0;
private static boolean lastTranscriptWasFinal = false;
private static StreamController referenceToStreamController;
private static ByteString tempByteString;
private static void start() {
ResponseObserver<StreamingRecognizeResponse> responseObserver = null;
try (SpeechClient client = SpeechClient.create()) {
ClientStream<StreamingRecognizeRequest> clientStream;
responseObserver = new ResponseObserver<StreamingRecognizeResponse>() {
ArrayList<StreamingRecognizeResponse> responses = new ArrayList<>();
@Override
public void onComplete() {
System.out.println("!!!!!!!!!!!!!!!!!!!!!");
}
@Override
public void onError(Throwable arg0) {
System.out.println(arg0.getMessage());
}
@Override
public void onResponse(StreamingRecognizeResponse response) {
System.out.println("Inside onResponse ------------");
responses.add(response);
StreamingRecognitionResult result = response.getResultsList().get(0);
Duration resultEndTime = result.getResultEndTime();
resultEndTimeInMS = (int) ((resultEndTime.getSeconds() * 1000)
+ (resultEndTime.getNanos() / 1000000));
double correctedTime = resultEndTimeInMS - bridgingOffset + (STREAMING_LIMIT * restartCounter);
DecimalFormat format = new DecimalFormat("0.#");
SpeechRecognitionAlternative alternative = result.getAlternativesList().get(0);
if (result.getIsFinal()) {
System.out.print(GREEN);
System.out.print("\033[2K\r");
System.out.printf("%s: %s\n", format.format(correctedTime), alternative.getTranscript());
isFinalEndTime = resultEndTimeInMS;
lastTranscriptWasFinal = true;
} else {
System.out.print(RED);
System.out.print("\033[2K\r");
System.out.printf("%s: %s", format.format(correctedTime), alternative.getTranscript());
lastTranscriptWasFinal = false;
}
}
@Override
public void onStart(StreamController controller) {
referenceToStreamController = controller;
}
};
clientStream = client.streamingRecognizeCallable().splitCall(responseObserver);
RecognitionConfig recognitionConfig = RecognitionConfig.newBuilder()
.setEncoding(RecognitionConfig.AudioEncoding.LINEAR16).setLanguageCode("en-US")
.setSampleRateHertz(16000)
.build();
StreamingRecognitionConfig streamingRecognitionConfig = StreamingRecognitionConfig.newBuilder()
.setConfig(recognitionConfig).setInterimResults(true).build();
StreamingRecognizeRequest request = StreamingRecognizeRequest.newBuilder()
.setStreamingConfig(streamingRecognitionConfig).build(); // The first request in a streaming call
// has to be a config
clientStream.send(request);
System.out.println("Configuration request sent");
long startTime = System.currentTimeMillis();
while (true) {
Thread.sleep(5000);
long estimatedTime = System.currentTimeMillis() - startTime;
if (estimatedTime >= STREAMING_LIMIT) {
clientStream.closeSend();
referenceToStreamController.cancel(); // remove
if (resultEndTimeInMS > 0) { finalRequestEndTime = isFinalEndTime; }
resultEndTimeInMS = 0;
lastAudioInput = null;
lastAudioInput = audioInput;
audioInput = new ArrayList<ByteString>();
restartCounter++;
if (!lastTranscriptWasFinal) { System.out.print('\n'); }
newStream = true;
clientStream = client.streamingRecognizeCallable().splitCall(responseObserver);
request = StreamingRecognizeRequest.newBuilder().setStreamingConfig(streamingRecognitionConfig).build();
System.out.println(YELLOW);
System.out.printf("%d: RESTARTING REQUEST\n", restartCounter * STREAMING_LIMIT);
startTime = System.currentTimeMillis();
} else {
if ((newStream) && (lastAudioInput.size() > 0)) {
// if this is the first audio from a new request
// calculate amount of unfinalized audio from last request
// resend the audio to the speech client before incoming audio
double chunkTime = STREAMING_LIMIT / lastAudioInput.size();
// ms length of each chunk in previous request audio arrayList
if (chunkTime != 0) {
if (bridgingOffset < 0) {
// bridging Offset accounts for time of resent audio
// calculated from last request
bridgingOffset = 0;
}
if (bridgingOffset > finalRequestEndTime) {
bridgingOffset = finalRequestEndTime;
}
int chunksFromMS = (int) Math.floor((finalRequestEndTime - bridgingOffset) / chunkTime);
// chunks from MS is number of chunks to resend
bridgingOffset = (int) Math.floor((lastAudioInput.size() - chunksFromMS) * chunkTime);
// set bridging offset for next request
for (int i = chunksFromMS; i < lastAudioInput.size(); i++) {
request = StreamingRecognizeRequest.newBuilder().setAudioContent(lastAudioInput.get(i))
.build();
clientStream.send(request);
}
}
newStream = false;
}
tempByteString = ByteString.copyFrom(sharedQueue.take());
request = StreamingRecognizeRequest.newBuilder().setAudioContent(tempByteString).build();
audioInput.add(tempByteString);
}
clientStream.send(request);
}
} catch (Exception e) {
e.printStackTrace();
}
}
public static void main(String args[]) {
DataLoader2 dataLoader = new DataLoader2();
Thread t = new Thread(dataLoader);
t.start();
SpeechToText.start();
}
}
FFmpeg command for PCM encoding (these flags line up with the RecognitionConfig above: pcm_s16le is 16-bit linear PCM, matching LINEAR16, -ar 16000 matches setSampleRateHertz(16000), and -ac 1 gives mono audio):
ffmpeg -i rtmp://localhost:1935/liveapp/abc -c:a pcm_s16le -ac 1 -ar 16000 -f flv rtmp://localhost:1935/livewowza/xyz

How can opencv image processing run faster?

I am working in Android Studio with the OpenCV library. I am dealing with the ColorBlobDetectionActivity class, and I want to rearrange its processing: OpenCV processes the whole screen, but I want to process only a particular region to make it faster on the Android camera. Can someone help me?
Here is the ColorBlobDetectionActivity class code:
private boolean mIsColorSelected = false;
private Mat mRgba;
private Scalar mBlobColorRgba;
private Scalar mBlobColorHsv;
private ColorBlobDetector mDetector;
private Mat mSpectrum;
private Size SPECTRUM_SIZE;
private Scalar CONTOUR_COLOR;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(ColorBlobDetectionActivity.this);
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public ColorBlobDetectionActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.color_blob_detection_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.color_blob_detection_activity_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
mDetector = new ColorBlobDetector();
mSpectrum = new Mat();
mBlobColorRgba = new Scalar(255);
mBlobColorHsv = new Scalar(255);
SPECTRUM_SIZE = new Size(200, 64);
CONTOUR_COLOR = new Scalar(255,0,0,255);
}
public void onCameraViewStopped() {
mRgba.release();
}
public boolean onTouch(View v, MotionEvent event) {
int cols = mRgba.cols();
int rows = mRgba.rows();
int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
int x = (int)event.getX() - xOffset;
int y = (int)event.getY() - yOffset;
Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");
if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;
Rect touchedRect = new Rect();
touchedRect.x = (x>4) ? x-4 : 0;
touchedRect.y = (y>4) ? y-4 : 0;
touchedRect.width = (x+4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
touchedRect.height = (y+4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;
Mat touchedRegionRgba = mRgba.submat(touchedRect);
Mat touchedRegionHsv = new Mat();
Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
// Calculate average color of touched region
mBlobColorHsv = Core.sumElems(touchedRegionHsv);
int pointCount = touchedRect.width*touchedRect.height;
for (int i = 0; i < mBlobColorHsv.val.length; i++)
mBlobColorHsv.val[i] /= pointCount;
mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);
Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
mDetector.setHsvColor(mBlobColorHsv);
Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
mIsColorSelected = true;
touchedRegionRgba.release();
touchedRegionHsv.release();
return false; // don't need subsequent touch events
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
if (mIsColorSelected) {
mDetector.process(mRgba);
List<MatOfPoint> contours = mDetector.getContours();
Log.e(TAG, "Contours count: " + contours.size());
Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Mat colorLabel = mRgba.submat(4, 68, 4, 68);
colorLabel.setTo(mBlobColorRgba);
Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
mSpectrum.copyTo(spectrumLabel);
}
return mRgba;
}
private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
}
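One hypothetical way to cut down the work per frame, sticking to the classes this activity already uses, is to run the detector on a sub-region (a submat view) of the frame instead of the whole image. This is only a sketch; the ROI size and position below are arbitrary placeholders, not values from the original code:
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
if (mIsColorSelected) {
// Placeholder ROI: the centered quarter of the frame (half width, half height)
int roiWidth = mRgba.cols() / 2;
int roiHeight = mRgba.rows() / 2;
Rect roi = new Rect((mRgba.cols() - roiWidth) / 2, (mRgba.rows() - roiHeight) / 2, roiWidth, roiHeight);
// submat() returns a view into mRgba, so no pixels are copied
Mat roiMat = mRgba.submat(roi);
// The detector now only scans the ROI, which reduces per-frame work
mDetector.process(roiMat);
List<MatOfPoint> contours = mDetector.getContours();
Log.e(TAG, "Contours count: " + contours.size());
// Drawing on the submat view writes directly into the corresponding area of mRgba
Imgproc.drawContours(roiMat, contours, -1, CONTOUR_COLOR);
roiMat.release();
}
return mRgba;
}
Note that the contour coordinates returned by the detector are relative to the ROI, which is why the drawing above targets the submat view rather than the full frame.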

BarChart Update

Now another problem has occurred: the query I'm using seems to work only the first time; after pressing the next/previous buttons it gives me something else!
Here is the query I used:
for (int i = 0; i <= months.length + 1; i++) {
try {
String a;
if (i < 9) {
a = y + "0" + (i + 1);
} else {
a = y + "" + (i + 1);
}
System.out.println("Année Courante " + a);
conn = DBConnection.connect();
String sql = "select sum(montant_operation) from operations where (select Extract(YEAR_MONTH from date_operation)) = '" + a + "' and typ_operation ='Versement';";
final ResultSet rs = conn.prepareStatement(sql).executeQuery();
if (rs.next()) {
System.out.println(series1.getData().toString());
series1.getData().add(new XYChart.Data<>(months[i], rs.getFloat("sum(montant_operation)")));
}
} catch (SQLException e) {
System.out.println(e);
}
}
But is it possible for a query to work fine once and then start giving errors?
Have a nice day
In your BuildData method (which, by the way, does not follow Java naming conventions; consider renaming it) you are updating the data of the series. In the same method you are also adding this series to the chart. Every click of the "next" button invokes BuildData, where the series is added to the chart again, which is unnecessary. Delete the
Platform.runLater(() -> {
barchart.getData().add(series1);
});
part from the method and add the chart only once in start:
...
...
vbox.getChildren().addAll(box, barchart);
barchart.getData().add(series1);
pane.getChildren().add(vbox);
...
...
The tested SSCCE:
import java.util.Calendar;
import javafx.application.Application;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.geometry.Insets;
import javafx.geometry.Orientation;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.chart.BarChart;
import javafx.scene.chart.CategoryAxis;
import javafx.scene.chart.NumberAxis;
import javafx.scene.chart.XYChart;
import javafx.scene.control.Button;
import javafx.scene.layout.FlowPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;
public class BarChartDemo extends Application {
final String[] months = {"Janvier", "Février", "Mars", "Avril", "Mai", "Juin", "Juillet", "Août", "Septembre", "Octobre", "Novembre", "Décembre"};
//Connection conn;
final CategoryAxis month_axis = new CategoryAxis();
final NumberAxis data_axis = new NumberAxis();
final XYChart.Series<String, Number> series1 = new XYChart.Series();
private final BarChart<String, Number> barchart = new BarChart(month_axis, data_axis);
private Integer year = 0;
@Override
public void start(Stage primaryStage) {
year = Calendar.getInstance().get(Calendar.YEAR);
Button btn_next = new Button("NEXT");
Button btn_previous = new Button("PREVIOUS");
HBox box = new HBox(50);
box.getChildren().addAll(btn_previous, btn_next);
box.setAlignment(Pos.TOP_CENTER);
VBox vbox = new VBox(25);
box.setPadding(new Insets(10, 0, 10, 0));
FlowPane pane = new FlowPane(Orientation.VERTICAL);
vbox.getChildren().addAll(box, barchart);
barchart.getData().add(series1);
pane.getChildren().add(vbox);
Scene scene = new Scene(pane);
primaryStage.setTitle("Hello World!");
primaryStage.setScene(scene);
primaryStage.show();
BuildData(year);
btn_next.setOnAction(new EventHandler<ActionEvent>() {
@Override
public void handle(ActionEvent event) {
year += 1;
BuildData(year);
}
});
btn_previous.setOnAction(new EventHandler<ActionEvent>() {
@Override
public void handle(ActionEvent event) {
year -= 1;
BuildData(year);
}
});
}
public static void main(String[] args) {
launch(args);
}
private void BuildData(Integer y) {
series1.setName("Versement");
month_axis.setLabel("Mois de l'Année");
month_axis.setStyle("-fx-font-weight:BOLD;" + "-fx-font-size:15");
data_axis.setLabel("Valeur des Opérations Bancaires");
data_axis.setStyle("-fx-font-weight:BOLD;" + "-fx-font-size:15");
series1.getData().clear(); // clear old values
for (int i = 0; i < months.length; i++) {
series1.getData().add(new XYChart.Data(months[i], i * 10 * (y-2000)));
// try {
// String a;
// if (i < 9) {
// a = y + "0" + (i + 1);
// } else {
// a = y + "" + (i + 1);
// }
// conn = DBConnection.connect();
// String sql = "select sum(montant_operation) from operations where (select Extract(YEAR_MONTH from date_operation)) = '" + a + "' and typ_operation ='Versement';";
// final ResultSet rs = conn.prepareStatement(sql).executeQuery();
// if (rs.next()) {
// series1.getData().add(new XYChart.Data<>(months[i], rs.getFloat("sum(montant_operation)")));
// }
// } catch (SQLException e) {
// System.out.println(e);
// }
}
}
}

Android WebView shows extra horizontal white spaces in Android 4.0+ devices

In the code attached, I am trying to use two Android WebViews (webView1 and webView2): a JavaScript-enabled menu is displayed inside webView1, and the page showing the result of a menu click triggered in webView1 is displayed inside webView2.
As the code shows, I am trying to fit the WebViews dynamically to the target screen by calculating my own initial scales and then plugging the computed scales into the WebViews at run-time.
Here is the problem I would like your assistance with: when I click on the menu items in webView1 in order to show the result in webView2, extra horizontal white space appears in webView2 (the white space is about the same width as the target screen, but the horizontal scroll bar is about the width of webView2). The white space appears randomly (i.e. sometimes it appears, sometimes it does not).
I have tried adding function calls to clear the WebViews' cache. The horizontal white space appears less frequently in webView2, but the problem still persists.
It is interesting that this only happens on Android 4.0+ devices and does not happen on devices running Android 3 and below. We have tested this using a Samsung Galaxy Note II and a Samsung Galaxy Tab 10.1 as Android 4 devices, and on a Huawei Ideos U8150 (Android 2.2) and an unbranded Android 3 device.
I have set the minimum SDK target to API 8 and the maximum SDK target to API 15 when compiling the Android project.
I have already done extensive research on this problem on the Internet (including on StackOverflow) for 2 days, but I could not find any post similar to my problem's context.
I would like to know how to remove the horizontal white space in webView2. Please assist.
Thanks.
Please see code listing below:
package com.xyz.XyzPkg;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import android.os.AsyncTask;
import android.os.Bundle;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Gravity;
import android.view.Menu;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnTouchListener;
import android.view.ViewGroup;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.ImageView.ScaleType;
import android.widget.LinearLayout;
import android.widget.LinearLayout.LayoutParams;
import android.widget.ScrollView;
import android.graphics.Bitmap;
public class OpenxyzActivitiesActivity extends Activity implements OnClickListener {
private final static int ID_BTNBACK = 987654321;
private final static int ID_BTNACTIVITY_UI1 = 999977777;
private final static int ID_BTNACTIVITY_UI2 = 999977776;
WebView webView1;
WebView webView2;
protected String TAG = OpenxyzActivitiesActivity.class.getSimpleName();
ImageButton btnBack;
int screenwidth = 0;
float fOrigWidth1 = 0.0f; //180.0f;
float fOrigWidth2 = 0.0f; //300.0f;
float fWeightSum = 0.0f;
float fWeight1 = 0.0f;
float fWeight2 = 0.0f;
float fWindowWidth = 0.0f;
float fWindowHeight = 0.0f;
float fWindowDensity = 0.0f;
float fInitScale1 = 0.0f;
float fInitScale2 = 0.0f;
int fViewportSize1 = 0;
int fViewportSize2 = 0;
int delta = 0;
boolean bChosenWidth = false;
String sUrlBase = "http://test.xyz.com/parents";
LinearLayout LinearLayout1;
ImageButton btnActivityUI1;
ImageButton btnActivityUI2;
FrameLayout frameLayout0;
LinearLayout LinearLayout4;
final int ID_WEBVIEW1 = 987654321;
@SuppressLint("SetJavaScriptEnabled")
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
webView1 = new WebView(this);
webView1.setId(ID_WEBVIEW1);
webView1.getSettings().setJavaScriptEnabled(true);
webView2 = new WebView(this);
//new InitTask().execute(new String[]{sUrlBase + "/activities/sendActvtData.php"});
new InitTask().execute(new String[]{sUrlBase + "/activities/sendActvtData.php"});
}
//public class InitThread implements Runnable{
private class InitTask extends AsyncTask<String, Void, ArrayList<Float> >{
@Override
protected void onPostExecute(ArrayList<Float> result) {
// TODO Auto-generated method stub
super.onPostExecute(result);
frameLayout0 = new FrameLayout(getApplicationContext());
LinearLayout.LayoutParams oFrameLayout0Params = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.MATCH_PARENT);
frameLayout0.setLayoutParams(oFrameLayout0Params);
LinearLayout LinearLayout0 = new LinearLayout(getApplicationContext());
LinearLayout0.setWeightSum(100.0f);
LinearLayout LinearLayout1 = new LinearLayout(getApplicationContext());
LinearLayout.LayoutParams oLinearLayout1Params = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.MATCH_PARENT);
LinearLayout1.setLayoutParams(oLinearLayout1Params);
LinearLayout1.setWeightSum(100.0f);
//BEGIN: Setting up Window Size -- Change Windows Size in UI Thread
LayoutParams oLayoutParams = (LayoutParams)LinearLayout1.getLayoutParams();
oLayoutParams.width = (int)fWindowWidth;
oLayoutParams.height = (int)fWindowHeight;
LinearLayout1.setLayoutParams(oLayoutParams);
LinearLayout linearLayoutDummyProduct = new LinearLayout(getApplicationContext());
android.widget.LinearLayout.LayoutParams linearLayoutDummyProductLayoutParams = new android.widget.LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.MATCH_PARENT);
linearLayoutDummyProduct.setLayoutParams(linearLayoutDummyProductLayoutParams);
oLayoutParams = (LayoutParams)linearLayoutDummyProduct.getLayoutParams();
LinearLayout linearLayoutDummyProduct2 = new LinearLayout(getApplicationContext());
linearLayoutDummyProduct2.setLayoutParams(linearLayoutDummyProductLayoutParams);
oLayoutParams = (LayoutParams)linearLayoutDummyProduct2.getLayoutParams();
float fDummy1Weight = 0.0f;
float fFrameLayoutWeight = 0.0f;
if (bChosenWidth)
{
LinearLayout0.setOrientation(LinearLayout.VERTICAL);
fDummy1Weight = 100.0f*delta/fWindowHeight;
linearLayoutDummyProductLayoutParams = new android.widget.LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, 0, fDummy1Weight);
fFrameLayoutWeight = 100.0f-2*fDummy1Weight;
oFrameLayout0Params = new android.widget.LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, 0, fFrameLayoutWeight);
Log.d(TAG, "fDummy1Weight = 100.0f*delta/fWindowHeight;");
}
else{
LinearLayout0.setOrientation(LinearLayout.HORIZONTAL);
fDummy1Weight = 100.0f*delta/fWindowWidth;
linearLayoutDummyProductLayoutParams = new android.widget.LinearLayout.LayoutParams(0, LinearLayout.LayoutParams.MATCH_PARENT, fDummy1Weight);
fFrameLayoutWeight = 100.0f-2*fDummy1Weight;
oFrameLayout0Params = new android.widget.LinearLayout.LayoutParams(0, LinearLayout.LayoutParams.MATCH_PARENT, fFrameLayoutWeight);
Log.d(TAG, "fDummy1Weight = 100.0f*delta/fWindowWidth;");
}
Log.d(TAG, "fDummy1Weight = " + fDummy1Weight);
Log.d(TAG, "fFrameLayoutWeight = " + fFrameLayoutWeight);
linearLayoutDummyProduct.setLayoutParams(linearLayoutDummyProductLayoutParams);
linearLayoutDummyProduct2.setLayoutParams(linearLayoutDummyProductLayoutParams);
frameLayout0.setLayoutParams(oFrameLayout0Params);
//END: Setting up Window Size
float fWeightSum = LinearLayout1.getWeightSum();
float fWidthSum = fOrigWidth1 + fOrigWidth2;
fWeight1 = fOrigWidth1 / fWidthSum * fWeightSum;
fWeight2 = fOrigWidth2 / fWidthSum * fWeightSum;
fViewportSize1 = screenwidth * (int)fOrigWidth1 / (int)(fOrigWidth1+fOrigWidth2);
fViewportSize2 = screenwidth * (int)fOrigWidth2 / (int)(fOrigWidth1+fOrigWidth2);
Log.d(TAG, "onPostExecute: fViewportSize1=" + fViewportSize1);
Log.d(TAG, "onPostExecute: fViewportSize2=" + fViewportSize2);
android.widget.LinearLayout.LayoutParams oLinearLayoutParams1 = new android.widget.LinearLayout.LayoutParams(0, LinearLayout.LayoutParams.MATCH_PARENT, fWeight1);
android.widget.LinearLayout.LayoutParams oLinearLayoutParams2 = new android.widget.LinearLayout.LayoutParams(0, LinearLayout.LayoutParams.MATCH_PARENT, fWeight2);
webView1.setLayoutParams(oLinearLayoutParams1);
webView2.setLayoutParams(oLinearLayoutParams2);
oLinearLayoutParams1 = (android.widget.LinearLayout.LayoutParams)webView1.getLayoutParams();
oLinearLayoutParams2 = (android.widget.LinearLayout.LayoutParams)webView2.getLayoutParams();
webView2.getSettings().setUseWideViewPort(false);
LinearLayout1.addView(webView1);
LinearLayout1.addView(webView2);
Log.d(TAG, "onPostExecute: fWeightSum=" + fWeightSum);
Log.d(TAG, "onPostExecute: fWidthSum=" + fWidthSum);
Log.d(TAG, "onPostExecute: oLinearLayoutParams1.weight=" + oLinearLayoutParams1.weight);
Log.d(TAG, "onPostExecute: oLinearLayoutParams2.weight=" + oLinearLayoutParams2.weight);
webView1.setInitialScale((int)fInitScale1);
webView2.setInitialScale((int)fInitScale2);
webView1.setScrollBarStyle(WebView.SCROLLBARS_OUTSIDE_OVERLAY);
webView2.setScrollBarStyle(WebView.SCROLLBARS_OUTSIDE_OVERLAY);
webView1.setPersistentDrawingCache(ViewGroup.PERSISTENT_NO_CACHE);
webView1.clearCache(true);
webView1.clearHistory();
webView1.getSettings().setCacheMode(WebSettings.LOAD_NO_CACHE);
//webView1.loadUrl(sUrlBase + "/activities/?lang=en");
String sWebView1Url = sUrlBase + "/activities/?lang=en&viewport_width="+fViewportSize1 + "&viewport_initial_scale=" + fInitScale1*0.01f;
webView1.loadUrl(sWebView1Url);
Log.d(TAG, "onPostExecute(): sWebView1Url: " + sWebView1Url);
webView1.setWebViewClient(new WebViewClient() {
public boolean shouldOverrideUrlLoading(WebView view, String url){
//webView1.stopLoading();
url = url + "&viewport_width=" + fViewportSize2 + "&viewport_initial_scale=" + fInitScale2*0.01f;
Log.d(TAG, "setWebViewClient: CHK000: url: " + url);
if (view.getId() == ID_WEBVIEW1)
{
Log.d(TAG, "ID_WEBVIEW1: " + ID_WEBVIEW1);
Log.d(TAG, "WebView1 stopped loading");
//view.stopLoading();
webView1.setPersistentDrawingCache(ViewGroup.PERSISTENT_NO_CACHE);
webView1.clearCache(true);
webView1.clearHistory();
webView2.setPersistentDrawingCache(ViewGroup.PERSISTENT_NO_CACHE);
webView2.clearView();
webView2.clearCache(true);
webView2.clearHistory();
Map<String, String> noCacheHeaders = new HashMap<String, String>(2);
noCacheHeaders.put("Pragma", "no-cache");
noCacheHeaders.put("Cache-Control", "no-cache");
// webView2.getSettings().setAppCacheMaxSize(0);
// webView2.getSettings().setAppCacheEnabled(false);
webView2.getSettings().setCacheMode(WebSettings.LOAD_NO_CACHE);
webView2.loadUrl(url, noCacheHeaders);
// webView2.loadUrl(url);
Log.d(TAG , "shouldOverrideUrlLoading(): fInitScale1: " + fInitScale1);
Log.d(TAG , "shouldOverrideUrlLoading(): fInitScale2: " + fInitScale2);
Log.d(TAG , "shouldOverrideUrlLoading(): webView1.getScale(): " + webView1.getScale());
Log.d(TAG , "shouldOverrideUrlLoading(): webView2.getScale(): " + webView2.getScale());
Log.d(TAG, "shouldOverrideUrlLoading(): ((LinearLayout.LayoutParams)webView2.getLayoutParams()).weight: " + ((LinearLayout.LayoutParams)webView2.getLayoutParams()).weight);
Log.d(TAG , "shouldOverrideUrlLoading(): url: " + url);
webView1.setInitialScale((int)fInitScale1);
webView2.setInitialScale((int)fInitScale2);
}
else
{
Log.d(TAG, "Other Web View triggered shouldOverrideUrlLoading");
}
return true;
}
});
webView2.setPersistentDrawingCache(ViewGroup.PERSISTENT_NO_CACHE);
webView2.clearView();
webView2.clearCache(true);
webView2.clearHistory();
// Map<String, String> noCacheHeaders = new HashMap<String, String>(2);
// noCacheHeaders.put("Pragma", "no-cache");
// noCacheHeaders.put("Cache-Control", "no-cache");
// webView2.getSettings().setAppCacheMaxSize(0);
// webView2.getSettings().setAppCacheEnabled(false);
webView2.getSettings().setCacheMode(WebSettings.LOAD_NO_CACHE);
// webView2.loadUrl("http://parents.xyz.com/activities/?catg=coloring", noCacheHeaders);
//webView2.loadUrl(sUrlBase + "/activities/?catg=coloring");
webView2.loadUrl(sUrlBase + "/activities/?catg=coloring&viewport_width="+fViewportSize2 + "&viewport_initial_scale=" + fInitScale1*0.01f);
//webView2.getSettings().setTextSize(t)
frameLayout0.addView(LinearLayout1);
LinearLayout LinearLayout2 = new LinearLayout(getApplicationContext());
LinearLayout.LayoutParams oLinearLayout2Params = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.MATCH_PARENT);
LinearLayout2.setLayoutParams(oLinearLayout2Params);
LinearLayout2.setWeightSum(100.0f);
LinearLayout2.setOrientation(LinearLayout.HORIZONTAL);
frameLayout0.addView(LinearLayout2);
ImageView btnInvisible0 = new ImageView(getApplicationContext());
LinearLayout.LayoutParams btnInvisible0Params = new LinearLayout.LayoutParams(0, LinearLayout.LayoutParams.MATCH_PARENT, 90.0f);
btnInvisible0.setLayoutParams(btnInvisible0Params);
LinearLayout LinearLayout3 = new LinearLayout(getApplicationContext());
LinearLayout.LayoutParams oLinearLayout3Params = new LinearLayout.LayoutParams(0, LinearLayout.LayoutParams.MATCH_PARENT, 10.0f);
LinearLayout3.setLayoutParams(oLinearLayout3Params);
LinearLayout3.setWeightSum(100.0f);
LinearLayout3.setOrientation(LinearLayout.VERTICAL);
btnBack = new ImageButton(getApplicationContext());
btnBack.setId(ID_BTNBACK);
LinearLayout.LayoutParams btnBackParams = new LinearLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT, 0, 18.0f);
btnBack.setLayoutParams(btnBackParams);
btnBack.setScaleType(ScaleType.FIT_XY);
btnBack.post(new Runnable(){
@Override
public void run() {
// TODO Auto-generated method stub
btnBack.setBackgroundDrawable((Drawable)getResources().getDrawable(R.drawable.btn_back));
}
});
btnBack.setOnClickListener(OpenxyzActivitiesActivity.this);
ImageView btnInvisible1 = new ImageView(getApplicationContext());
LinearLayout.LayoutParams btnInvisible1Params = new LinearLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT, 0, 82.0f);
btnInvisible1.setLayoutParams(btnInvisible1Params);
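// LinearLayout4 is a full-screen overlay holding the two activity-size
// buttons; it is removed in onClick() once the user picks a size.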
LinearLayout4 = new LinearLayout(getApplicationContext());
LinearLayout.LayoutParams oLinearLayout4Params = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.MATCH_PARENT);
LinearLayout4.setLayoutParams(oLinearLayout4Params);
LinearLayout4.setWeightSum(100.0f);
LinearLayout4.setOrientation(LinearLayout.HORIZONTAL);
LinearLayout4.setOnClickListener(OpenxyzActivitiesActivity.this);
frameLayout0.addView(LinearLayout4);
btnActivityUI1 = new ImageButton(getApplicationContext());
btnActivityUI1.setId(ID_BTNACTIVITY_UI1);
LinearLayout.LayoutParams btnActivityUI1Params = new LinearLayout.LayoutParams(0, LinearLayout.LayoutParams.MATCH_PARENT, 37.5f);
btnActivityUI1.setLayoutParams(btnActivityUI1Params);
btnActivityUI1.setScaleType(ScaleType.FIT_CENTER);
btnActivityUI1.post(new Runnable(){
@Override
public void run() {
// TODO Auto-generated method stub
btnActivityUI1.setImageDrawable((Drawable)getResources().getDrawable(R.drawable.activityui_300));
}
});
btnActivityUI1.setOnClickListener(OpenxyzActivitiesActivity.this);
btnActivityUI2 = new ImageButton(getApplicationContext());
btnActivityUI2.setId(ID_BTNACTIVITY_UI2);
LinearLayout.LayoutParams btnActivityUI2Params = new LinearLayout.LayoutParams(0, LinearLayout.LayoutParams.MATCH_PARENT, 62.5f);
btnActivityUI2.setLayoutParams(btnActivityUI2Params);
btnActivityUI2.setScaleType(ScaleType.FIT_CENTER);
btnActivityUI2.post(new Runnable(){
@Override
public void run() {
// TODO Auto-generated method stub
btnActivityUI2.setImageDrawable((Drawable)getResources().getDrawable(R.drawable.activityui_500));
}
});
btnActivityUI2.setOnClickListener(OpenxyzActivitiesActivity.this);
LinearLayout4.addView(btnActivityUI1);
LinearLayout4.addView(btnActivityUI2);
LinearLayout3.addView(btnBack);
LinearLayout3.addView(btnInvisible1);
LinearLayout2.addView(btnInvisible0);
LinearLayout2.addView(LinearLayout3);
LinearLayout0.addView(linearLayoutDummyProduct);
LinearLayout0.addView(frameLayout0);
LinearLayout0.addView(linearLayoutDummyProduct2);
setContentView(LinearLayout0);
}
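// Runs on a background thread; delegates the blocking HTTP call to
// callHttp() and passes its result to onPostExecute().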
@Override
protected ArrayList<Float> doInBackground(String... params) {
// TODO Auto-generated method stub
Log.d(InitTask.class.getSimpleName() , "params[0]: " + params[0]);
return callHttp(params[0]);
}
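// Performs a blocking HTTP GET against the given URL, expects a
// comma-separated pair of widths in the response body, and derives the
// window size and initial WebView scales from the current display metrics.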
private ArrayList<Float> callHttp(String url) {
ArrayList<Float> alResult = new ArrayList<Float>();
//BEGIN HTTP Request...
//String url = "http://parents.xyz.com/activities/sendActvtData.php";
Log.d(InitTask.class.getSimpleName() , "url: " + url);
HttpClient httpclient = new DefaultHttpClient();
// Prepare a request object
HttpGet httpget = new HttpGet(url);
Log.d(TAG , "CHK000");
// Execute the request
HttpResponse response;
String result = "";
try {
Log.d(TAG , "CHK010");
response = httpclient.execute(httpget);
// Examine the response status
Log.d(TAG , "CHK020");
Log.i(TAG,response.getStatusLine().toString());
Log.d(TAG , "CHK030");
// Get hold of the response entity
HttpEntity entity = response.getEntity();
// If the response does not enclose an entity, there is no need
// to worry about connection release
Log.d(TAG , "Is entity null? " + ((entity == null)?"YES":"NO") );
Log.d(TAG , "CHK040");
if (entity != null) {
// A Simple JSON Response Read
InputStream instream = entity.getContent();
result= convertStreamToString(instream);
// now you have the string representation of the HTML response
instream.close();
}
Log.d(TAG , "CHK100");
}
catch (ClientProtocolException cpe){
Log.e(TAG , cpe.getMessage());
}
catch (IOException ioe){
Log.e(TAG , ioe.getMessage());
}
catch (Exception e) {
Log.d(TAG , "CHK EXP");
Log.d(TAG, e.toString());
}
//END HTTP Request...
Log.d(TAG, "http result: " + result);
String[] ActvData = result.split(",");
fOrigWidth1 = Float.parseFloat(ActvData[0]);
fOrigWidth2 = Float.parseFloat(ActvData[1]);
Log.d(TAG, "fOrigWidth1: " + fOrigWidth1);
Log.d(TAG, "fOrigWidth2: " + fOrigWidth2);
//BEGIN: Setting up Window Size -- Calculation
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
screenwidth = metrics.widthPixels;
int screenheight = metrics.heightPixels;
Log.d(TAG, "screenwidth: " + screenwidth);
Log.d(TAG, "screenheight: " + screenheight);
//800x480
float heightFactor = (float)screenheight / 480f;
float widthFactor = (float)screenwidth / 800f;
float chosenFactor = 1f;
bChosenWidth = false;
if (widthFactor < heightFactor)
{
bChosenWidth = true;
chosenFactor = widthFactor;
}
else
{
bChosenWidth = false;
chosenFactor = heightFactor;
}
int newHeight = (int)((float)480.0 * (float)chosenFactor);
int newWidth = (int)((float)800.0 * (float)chosenFactor);
Log.d(TAG, "heightFactor: " + heightFactor);
Log.d(TAG, "widthFactor: " + widthFactor);
Log.d(TAG, "newHeight: " + newHeight);
Log.d(TAG, "newWidth: " + newWidth);
delta = 0;
if (bChosenWidth)
{
delta = (screenheight - newHeight) / 2;
}
else{
delta = (screenwidth - newWidth) / 2;
}
Log.d(TAG, "delta: " + delta);
//END: Setting up Window Size -- Calculation
fWindowWidth = newWidth;
fWindowHeight = newHeight;
fWindowDensity = (float)metrics.density;
fWeight1 = fOrigWidth1;
fWeight2 = fOrigWidth2;
fWeightSum = fWeight1 + fWeight2;
Log.d(TAG , "fWeight1: " + fWeight1);
Log.d(TAG , "fWeight2: " + fWeight2);
fInitScale1 = 0.0f;
fInitScale2 = 0.0f;
fInitScale1 = fInitScale2 = fWindowWidth / fWeightSum * 100;
Log.d(TAG , "fWindowWidth: " + fWindowWidth);
Log.d(TAG , "fWindowHeight: " + fWindowHeight);
Log.d(TAG , "fWindowDensity: " + fWindowDensity);
Log.d(TAG , "fInitScale1: " + fInitScale1);
Log.d(TAG , "fInitScale2: " + fInitScale2);
Log.d(TAG , "CHK200");
return alResult;
}
private String convertStreamToString(InputStream is) {
/*
* To convert the InputStream to a String we use BufferedReader.readLine().
* We iterate until readLine() returns null, which means there is no more
* data to read. Each line is appended to a StringBuilder and the result is
* returned as a String.
*/
BufferedReader reader = new BufferedReader(new InputStreamReader(is));
StringBuilder sb = new StringBuilder();
String line = null;
try {
while ((line = reader.readLine()) != null) {
sb.append(line + "\n");
}
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
is.close();
} catch (IOException e) {
e.printStackTrace();
}
}
return sb.toString();
}
}
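// The back button finishes the activity with RESULT_OK; either
// activity-size button removes the chooser overlay (LinearLayout4)
// from frameLayout0.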
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
switch(v.getId()){
case ID_BTNBACK:
Intent myIntent = new Intent();
setResult(Activity.RESULT_OK, myIntent);
finish();
break;
case ID_BTNACTIVITY_UI1: case ID_BTNACTIVITY_UI2:
LinearLayout4.setClickable(false);
frameLayout0.removeView(LinearLayout4);
break;
}
}
}
