Android Studio Opengl ES get problem with weird color mixturing - android-studio

Basically I've created a plane and rotate it 5 times to be a cube. I made cube with different colors of each side. And did some rotation with touch event.
Everthing was good so far, but the cube turned out to be like this.
Please help it's been driving me crazy!
My cube code:
public class PhotoCube{
public ArrayList<FloatBuffer> vertexBufferList = new ArrayList<>();
public int[][] lists = {
{0,1,0,0}, //front
{1,0,0,-90}, //top
{0,1,0,-90}, //left
{0,1,0,90}, //
{1,0,0,90}, //bottom
{0,1,0,180} //right
};
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static final float[] coords = { // in counterclockwise order:
0.5f, 0.5f, 0.5f, // top right
-0.5f, 0.5f, 0.5f, // top left
-0.5f, -0.5f, 0.5f, // bottom left
0.5f, -0.5f, 0.5f // bottom right
};
static final float[][] colorList = {
{1f,0f,0f,1f},
{0f,1f,0f,1f},
{1f,1f,1f,1f},
{1f,1f,0f,1f},
{1f,0f,1f,1f},
{0f,1f,1f,1f}
};
public PhotoCube() {
final int maxVertices = 6;
for(int[] list:lists){
float[] currentCoords = coords.clone();
for(int i=0;i<12;i+=3){
float x = coords[i];
float y = coords[i+1];
float z = coords[i+2];
double angle = Math.toRadians(list[3]);
currentCoords[i]=(float) ((list[0]==1)?x:(list[1]==1)?x*Math.cos(angle)+z*Math.sin(angle):x*Math.cos(angle)-y*Math.sin(angle));
currentCoords[i+1]=(float) ((list[0]==1)?y*Math.cos(angle)-z*Math.sin(angle):(list[1]==1)?y:x*Math.sin(angle)+y*Math.cos(angle));
currentCoords[i+2]=(float) ((list[0]==1)?z*Math.cos(angle)+y*Math.sin(angle):(list[1]==1)?z*Math.cos(angle)-x*Math.sin(angle):z);
}
ByteBuffer bb = ByteBuffer.allocateDirect(
// (number of coordinate values * 4 bytes per float)
currentCoords.length * 4);
// use the device hardware's native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
FloatBuffer vertexBuffer = bb.asFloatBuffer();
// add the coordinates to the FloatBuffer
vertexBuffer.put(currentCoords);
// set the buffer to read the first coordinate
vertexBuffer.position(0);
if(vertexBufferList.size()==maxVertices){
vertexBufferList.remove(0);
}
vertexBufferList.add(vertexBuffer);
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
createProgram();
GLES20.glLinkProgram(mProgram);
// creates OpenGL ES program executables
}
}
public void createProgram(){
// create empty OpenGL ES Program
mProgram = GLES20.glCreateProgram();
int vertexShader = MyGLRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = MyGLRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
// add the vertex shader to program
GLES20.glAttachShader(mProgram, vertexShader);
// add the fragment shader to program
GLES20.glAttachShader(mProgram, fragmentShader);
}
public void draw(int order) {
final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
// Add program to OpenGL ES environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
int positionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(positionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(positionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, true,
vertexStride, vertexBufferList.get(order));
// get handle to fragment shader's vColor member
int colorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(colorHandle, 1, colorList[order], 0);
GLES20.glDrawElements(
GLES20.GL_TRIANGLES,
drawOrder.length,
GL_UNSIGNED_SHORT,
drawListBuffer);
// Disable vertex array
//GLES20.glDisableVertexAttribArray(positionHandle);
}
}
My Renderer code:
public class MyGLRenderer implements GLSurfaceView.Renderer {
private PhotoCube mPhotoCube;
public final float[] vPMatrix = new float[16];
private final float[] projectionMatrix = new float[16];
private final float[] viewMatrix = new float[16];
private int vPMatrixHandle = -1;
private volatile float mAngleX = 0;
private volatile float mAngleY = 0;
private float[] rotationMX = new float[16];
private float[] rotationMY = new float[16];
private float[] scratch = new float[16];
public MyGLRenderer(Context context){
}
public static int loadShader(int type, String shaderCode) {
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
// Set the background frame color
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
mPhotoCube = new PhotoCube();
vPMatrixHandle = GLES20.glGetUniformLocation(mPhotoCube.mProgram, "uVPMatrix");
onDrawFrame(unused);
}
public void onDrawFrame(GL10 unused) {
// Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFuncSeparate(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA, GLES20.GL_ZERO, GLES20.GL_ONE);
Matrix.setRotateM(rotationMX, 0, -mAngleX, 0, 1, 0);
Matrix.setLookAtM(viewMatrix, 0, 0, 0, 5, 0f, 0f, -5f, 0f, 1.0f, 0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(vPMatrix, 0, projectionMatrix, 0, viewMatrix, 0);
Matrix.multiplyMM(scratch, 0, vPMatrix, 0, rotationMX, 0);
Matrix.setRotateM(rotationMY, 0, -mAngleY, scratch[0], scratch[4], scratch[8]);
Matrix.multiplyMM(scratch, 0, scratch, 0, rotationMY, 0);
vPMatrixHandle = GLES20.glGetUniformLocation(mPhotoCube.mProgram, "uMVPMatrix");
GLES20.glUniformMatrix4fv(vPMatrixHandle, 1, false, scratch, 0);
for(int i=0;i<mPhotoCube.lists.length;i++){
mPhotoCube.draw(i);
}
}
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
Matrix.frustumM(projectionMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
}
}
Here's my shader code
final String vertexShaderCode =
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = uMVPMatrix * vPosition;" +
"}";
final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
Thank you in advance.

You have to enable the Depth Test Depth test ensures that fragments that lie behind other fragments are discarded:
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
When you enable the depth test you must also clear the depth buffer:
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
However, if you have transparent objects and want to use Blending, the depth test must be disabled and you must draw the triangles of the meshes in sorted order from back to front. Also see OpenGL depth sorting

Related

Simple 3D Shape faces not rendering as expected - OpenGL ES in Android Studio

I am trying to make a rotating octahedron display correctly, I have successfully achieved other shapes such as a cube and tetrahedron, but I am experiencing some difficulty with this one.
Here is the simple obj file I am using:
v 0 -1 0
v 1 0 0
v 0 0 1
v -1 0 0
v 0 1 0
v 0 0 -1
#
f 1 2 3
f 4 1 3
f 5 4 3
f 2 5 3
f 2 1 6
f 1 4 6
f 4 5 6
f 5 2 6
My code is as follows:
class Shape(context: Context) {
private var mProgram: Int = 0
// Use to access and set the view transformation
private var mMVPMatrixHandle: Int = 0
//For Projection and Camera Transformations
private var vertexShaderCode = (
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
//"attribute vec4 vColor;" +
//"varying vec4 vColorVarying;" +
"void main() {" +
// the matrix must be included as a modifier of gl_Position
// Note that the uMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
" gl_Position = uMVPMatrix * vPosition;" +
//"vColorVarying = vColor;"+
"}")
private var fragmentShaderCode = (
"precision mediump float;" +
"uniform vec4 vColor;" +
//"varying vec4 vColorVarying;"+
"void main() {" +
//" gl_FragColor = vColorVarying;" +
" gl_FragColor = vColor;" +
"}")
internal var shapeColor = arrayOf<FloatArray>(
//front face (grey)
floatArrayOf(0f, 0f, 0f, 1f), //black
floatArrayOf(0f, 0f, 1f, 1f),
floatArrayOf(0f, 1f, 0f, 1f),
floatArrayOf(1f, 0f, 0f, 1f), // red
floatArrayOf(1f, 1f, 0f, 1f),
floatArrayOf(1f, 0f, 1f, 1f),
floatArrayOf(1f, 0f, 1f, 1f),
floatArrayOf(0f, 1f, 1f, 1f)
)
private var mPositionHandle: Int = 0
private var mColorHandle: Int = 0
// var objLoader = ObjLoader(context, "tetrahedron.txt")
// var objLoader = ObjLoader(context, "cube.txt")
var objLoader = ObjLoader(context, "octahedron.txt")
var shapeCoords: FloatArray
var numFaces: Int = 0
var vertexBuffer: FloatBuffer
var drawOrder: Array<ShortArray>
lateinit var drawListBuffer: ShortBuffer
init {
//assign coordinates and order in which to draw them (obtained from obj loader class)
shapeCoords = objLoader.vertices.toFloatArray()
drawOrder = objLoader.faces.toTypedArray()
numFaces = objLoader.numFaces
// initialize vertex byte buffer for shape coordinates
val bb = ByteBuffer.allocateDirect(
// (# of coordinate varues * 4 bytes per float)
shapeCoords.size * 4
)
bb.order(ByteOrder.nativeOrder())
vertexBuffer = bb.asFloatBuffer()
vertexBuffer.put(shapeCoords)
vertexBuffer.position(0)
// create empty OpenGL ES Program
mProgram = GLES20.glCreateProgram()
val vertexShader = loadShader(
GLES20.GL_VERTEX_SHADER,
vertexShaderCode
)
val fragmentShader = loadShader(
GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode
)
// add the vertex shader to program
GLES20.glAttachShader(mProgram, vertexShader)
// add the fragment shader to program
GLES20.glAttachShader(mProgram, fragmentShader)
// creates OpenGL ES program executables
GLES20.glLinkProgram(mProgram)
}
var vertexStride = COORDS_PER_VERTEX * 4 // 4 bytes per vertex
fun draw(mvpMatrix: FloatArray) { // pass in the calculated transformation matrix
for (face in 0 until numFaces) {
// Add program to OpenGL ES environment
GLES20.glUseProgram(mProgram)
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition")
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor")
// Enable a handle to the cube vertices
GLES20.glEnableVertexAttribArray(mPositionHandle)
// Prepare the cube coordinate data
GLES20.glVertexAttribPointer(
mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer
)
GLES20.glUniform4fv(mColorHandle, 1, shapeColor[face], 0)
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix")
// Pass the projection and view transformation to the shader
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0)
// initialize byte buffer for the draw list
var dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder[face].size * 2
)
dlb.order(ByteOrder.nativeOrder())
drawListBuffer = dlb.asShortBuffer()
drawListBuffer.put(drawOrder[face])
drawListBuffer.position(0)
GLES20.glDrawElements(
GLES20.GL_TRIANGLES,
dlb.capacity(),
GLES20.GL_UNSIGNED_SHORT,
drawListBuffer //position indices
)
}
// Disable vertex array
GLES20.glDisableVertexAttribArray(mMVPMatrixHandle)
}
companion object {
// number of coordinates per vertex in this array
internal var COORDS_PER_VERTEX = 3
}
}
class MyGLRenderer1(val context: Context) : GLSurfaceView.Renderer {
private lateinit var mShape: Shape
#Volatile
var mDeltaX = 0f
#Volatile
var mDeltaY = 0f
#Volatile
var mTotalDeltaX = 0f
#Volatile
var mTotalDeltaY = 0f
private val mMVPMatrix = FloatArray(16)
private val mProjectionMatrix = FloatArray(16)
private val mViewMatrix = FloatArray(16)
private val mRotationMatrix = FloatArray(16)
private val mAccumulatedRotation = FloatArray(16)
private val mCurrentRotation = FloatArray(16)
private val mTemporaryMatrix = FloatArray(16)
override fun onDrawFrame(gl: GL10?) {
// Redraw background color
// Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT or GLES20.GL_DEPTH_BUFFER_BIT)
val scratch = FloatArray(16)
// Create a rotation transformation for the square
Matrix.setIdentityM(mRotationMatrix, 0)
Matrix.setIdentityM(mCurrentRotation, 0)
Matrix.rotateM(mCurrentRotation, 0, mDeltaX, 0.0f, 1.0f, 0.0f)
// Matrix.rotateM(mCurrentRotation, 0, mDeltaY, 1.0f, 0.0f, 0.0f)
// Multiply the current rotation by the accumulated rotation, and then set the accumulated
// rotation to the result.
Matrix.multiplyMM(
mTemporaryMatrix,
0,
mCurrentRotation,
0,
mAccumulatedRotation,
0
)
System.arraycopy(mTemporaryMatrix, 0, mAccumulatedRotation, 0, 16)
// Rotate the cube taking the overall rotation into account.
Matrix.multiplyMM(
mTemporaryMatrix,
0,
mRotationMatrix,
0,
mAccumulatedRotation,
0
)
System.arraycopy(mTemporaryMatrix, 0, mRotationMatrix, 0, 16)
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrix, 0, 2f, 2f, -5f, 0f, 0f, 0f, 0f, 1.0f, 0.0f)
//Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0)
// Combine the rotation matrix with the projection and camera view
// Note that the mMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
Matrix.multiplyMM(scratch, 0, mMVPMatrix, 0, mRotationMatrix, 0)
gl?.glDisable(GL10.GL_CULL_FACE)
// Draw shape
mShape.draw(scratch)
}
override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
GLES20.glViewport(0, 0, width, height);
val ratio: Float = width.toFloat() / height.toFloat()
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
Matrix.frustumM(mProjectionMatrix, 0, -ratio, ratio, -1.0f, 1.0f, 3.0f, 7.0f)
}
override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
GLES20.glEnable(GLES20.GL_DEPTH_TEST)
// initialize a square
mShape = Shape(context)
// Initialize the accumulated rotation matrix
Matrix.setIdentityM(mAccumulatedRotation, 0)
}
}
fun loadShader(type: Int, shaderCode: String): Int {
return GLES20.glCreateShader(type).also { shader ->
GLES20.glShaderSource(shader, shaderCode)
GLES20.glCompileShader(shader)
}
}
class ObjLoader(context: Context, file: String) {
var numFaces: Int = 0
var vertices = Vector<Float>()
var normals = Vector<Float>()
var textures = Vector<Float>()
var faces = mutableListOf<ShortArray>()
init {
val reader: BufferedReader
val isr = InputStreamReader(context.assets.open(file))
reader = BufferedReader(isr)
var line = reader.readLine()
// read file until EOF
while (line != null) {
val parts = line.split((" ").toRegex()).dropLastWhile({ it.isEmpty() }).toTypedArray()
when (parts[0]) {
"v" -> {
var part1 = parts[1].toFloat()
var part2 = parts[2].toFloat()
var part3 = parts[3].toFloat()
// vertices
vertices.add(part1)
vertices.add(part2)
vertices.add(part3)
}
"vt" -> {
// textures
textures.add(parts[1].toFloat())
textures.add(parts[2].toFloat())
}
"vn" -> {
// normals
normals.add(parts[1].toFloat())
normals.add(parts[2].toFloat())
normals.add(parts[3].toFloat())
}
"f" -> {
// faces: vertex/texture/normal
faces.add(shortArrayOf(parts[1].toShort(), parts[2].toShort(), parts[3].toShort()))
println("dbg: points are "+ parts[1]+" "+parts[2]+" "+parts[3])
}
}
line = reader.readLine()
}
numFaces = faces.size
}}
The shape produced can be seen in the following screenshots, it is also visible on the black surface that there is possibly some sort of z fighting taking place? The black triangle flickers red and yellow:
Sometimes the following shapes are produced, flickering in and out of existence, in different colours:
Any help is much appreciated, thanks in advance.
Edit:
I have managed to make the vertices plot correctly thanks to the below answer however there is still this flickering going on, I really appreciate the help.
Array indices start at 0, but Wavefront (.obj) indices start at 1:
faces.add(shortArrayOf(parts[1].toShort(), parts[2].toShort(), parts[3].toShort()))
faces.add(shortArrayOf(
parts[1].toShort()-1, parts[2].toShort()-1, parts[3].toShort()-1))

LibGDX draws dots instead of lines

I would like to implement some kind of eraser,so in my render method I make the upper layer transparent.
#Override
public void render () {
cam.update();
Gdx.gl.glClearColor(1, 1, 1,1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
if (Gdx.input.isTouched()) {
pos.set(Gdx.input.getX(), Gdx.input.getY(), 0);
pixmap.setColor(new Color(1, 1, 1, 0.5f)); //transparency
pixmap.fillCircle((int)pos.x, (int)pos.y, 10);
}
texture3.draw(pixmap, 0, 0);
batch.begin();
batch.draw(texture, 0, 0);
batch.draw(texture3, 0, 0);
batch.end();
}
But I got points when make swipes. It requires to do very slow speed to make lines instead of dots.
So I expect continuous line instead of dots.
Can you advice something please?
Dots instead of line
This is caused because of the frequency at which the input state is updated, the solution here would be to manually calculate the missing points needed to make a line, you could do this with a linear interpolation between each pair of dots, additionally you could calculate how many extra dots are necessary depending on how far is the newest dot from the previous one, in my example I use an arbitrary number of extra dots (20) like so:
public class TestDraw extends Game {
private Pixmap pixmap;
private Texture texture;
private SpriteBatch batch;
private Vector2 lastPos;
#Override
public void create() {
pixmap = new Pixmap(1000, 1000, Pixmap.Format.RGBA8888);
texture = new Texture(pixmap);
batch = new SpriteBatch();
lastPos = new Vector2();
}
#Override
public void render() {
Gdx.gl.glClearColor(0, 0, 0, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
if (Gdx.input.isTouched()) {
pixmap.setColor(new Color(1, 1, 1, 0.5f)); //transparency
int newX = Gdx.input.getX();
int newY = Gdx.input.getY();
pixmap.setColor(Color.RED);
pixmap.fillCircle(newX, newY, 10);
// If the distance is too far, fill with extra dots
if (lastPos.dst(newX, newY) > 10) { // Here far is 10, you can adjust as needed
int extraDots = 20; // How many extra dots to draw a line, I use 20, adjust as needed or calculate according to distance (for example lastPos.dst(newX,newY) * 5)
for (int i = 0; i < extraDots; i++) {
float progress = (1f / extraDots) * i;
int dotX = (int) MathUtils.lerp(lastPos.x, newX, progress);
int dotY = (int) MathUtils.lerp(lastPos.y, newY, progress);
pixmap.setColor(Color.BLUE);
pixmap.fillCircle(dotX, dotY, 10);
}
}
// Store last position for next render() call
lastPos.set(newX, newY);
}
texture.draw(pixmap, 0, 0);
batch.begin();
batch.draw(texture, 0, 0);
batch.end();
}
}
Adecuate to your code as needed, I didn't know what was texture3 so I didn't include in my example
Also another option which I don't like too much because of rendering and storage cost is using a Polygon to draw the lines.

Zoom multiple sprites, individually on touch-Andengine

I am very new to andEngine, I want to add sprites on screen and let them move and zoom on finger touch.
Right now i am able to add multiple sprites on scene , and they can be dragged on touch.
Here is my code:
public class Main extends SimpleBaseGameActivity {
#Override
private Camera camera;
private BitmapTextureAtlas mBitmapTextureAtlas;
private ITextureRegion mFaceTextureRegion;
private ITextureRegion mFaceTextureRegion2;
private static final int CAMERA_WIDTH = 800;
private static final int CAMERA_HEIGHT = 480;
#Override
public EngineOptions onCreateEngineOptions() {
camera = new Camera(0, 0, CAMERA_WIDTH, CAMERA_HEIGHT);
EngineOptions engineOptions = new EngineOptions(true,
ScreenOrientation.LANDSCAPE_FIXED, new RatioResolutionPolicy(
CAMERA_WIDTH, CAMERA_HEIGHT), camera);
return engineOptions;
}
#Override
protected void onCreateResources() {
BitmapTextureAtlasTextureRegionFactory.setAssetBasePath("gfx/");
this.mBitmapTextureAtlas = new BitmapTextureAtlas(
this.getTextureManager(), 1024, 1600, TextureOptions.NEAREST);
// background
// this.background = new Sprite(0, 0,
// BitmapTextureAtlasTextureRegionFactory.createTiledFromAsset(this.mBitmapTextureAtlas,
// this, "ui_ball_1.png", 0, 0, 1, 1),
// this.getVertexBufferObjectManager());
this.mFaceTextureRegion = BitmapTextureAtlasTextureRegionFactory
.createFromAsset(this.mBitmapTextureAtlas, this,
"ui_ball_1.png", 0, 0);
this.mFaceTextureRegion2 = BitmapTextureAtlasTextureRegionFactory
.createFromAsset(this.mBitmapTextureAtlas, this,
"ui_ball_1.png", 0, 0);
this.mBitmapTextureAtlas.load();
// this.mEngine.getTextureManager().loadTexture(this.mBitmapTextureAtlas);
/*
* this.mBitmapTextureAtlas = new
* BitmapTextureAtlas(this.getTextureManager(), 32, 32,
* TextureOptions.BILINEAR); this.mFaceTextureRegion =
* BitmapTextureAtlasTextureRegionFactory
* .createFromAsset(this.mBitmapTextureAtlas, this, "ui_ball_1.png", 0,
* 0); this.mBitmapTextureAtlas.load();
*/
}
#Override
protected Scene onCreateScene() {
/*
* this.scene = new Scene(); this.scene.attachChild(this.background);
* this.scene.setBackground(new Background(0.09804f, 0.6274f, 0.8784f));
* return this.scene;
*/
this.mEngine.registerUpdateHandler(new FPSLogger());
final Scene scene = new Scene();
scene.setBackground(new Background(0.09804f, 0.6274f, 0.8784f));
final float centerX = (CAMERA_WIDTH - this.mFaceTextureRegion
.getWidth()) / 2;
final float centerY = (CAMERA_HEIGHT - this.mFaceTextureRegion
.getHeight()) / 2;
final Sprite face = new Sprite(centerX, centerY,
this.mFaceTextureRegion, this.getVertexBufferObjectManager()) {
#Override
public boolean onAreaTouched(final TouchEvent pSceneTouchEvent,
final float pTouchAreaLocalX, final float pTouchAreaLocalY) {
this.setPosition(pSceneTouchEvent.getX() - this.getWidth() / 2,
pSceneTouchEvent.getY() - this.getHeight() / 2);
return true;
}
};
face.setScale(2);
scene.attachChild(face);
scene.registerTouchArea(face);
final Sprite face2 = new Sprite(0, 0, this.mFaceTextureRegion2,
this.getVertexBufferObjectManager()) {
#Override
public boolean onAreaTouched(final TouchEvent pSceneTouchEvent,
final float pTouchAreaLocalX, final float pTouchAreaLocalY) {
this.setPosition(pSceneTouchEvent.getX() - this.getWidth() / 2,
pSceneTouchEvent.getY() - this.getHeight() / 2);
return true;
}
};
face2.setScale(2);
scene.attachChild(face2);
scene.registerTouchArea(face2);
scene.setTouchAreaBindingOnActionDownEnabled(true);
return scene;
}
}
Now i want to zoom each sprite on touch, but unable to find such method like setPosition available to move sprite to a specific position. Can anyone help me in achieving this without affecting the current functionality. Any help would be appreciated, may be in form of code or some direction/method to do this.
Thanks in advance :)
you can use a EntityModifier to make your effect:
#Override
public boolean onAreaTouched(final TouchEvent pSceneTouchEvent,
final float pTouchAreaLocalX, final float pTouchAreaLocalY) {
this.setPosition(pSceneTouchEvent.getX() - this.getWidth() / 2,
pSceneTouchEvent.getY() - this.getHeight() / 2);
this.clearEntityModifiers();
this.RegisterEntityModifier(new ScaleModifier(1f,2f,4f));
//1f = time to convert the scale of sprite of 2f to 4f
//2f = initial scale
//4f = finish scale
//when you dont touch the sprite back to normal scale
if(event.getAction()== MotionEvent.ACTION_UP) {
this.clearEntityModifiers();
this.RegisterEntityModifier(new ScaleModifier(1f,4f,2f));
}
//you also can work with "MotionEvent.ACTION_DOWN" and
//MotionEvent.ACTION_MOVE
return true;
}

Screen-space square looking distorted in PIX

I have a simple function that creates a square that covers the entire screen, I use it for applying post-processing effects, however as far as I can tell it has been the cause of countless errors.
When I run my code in PIX I get the following mesh, but the square should be straight and covering the screen, shouldn't it?
My vertex shader does no transformation and simply passes position information to the pixel shader.
The function that creates the square is as follows:
private void InitializeGeometry()
{
meshes = new Dictionary<Vector3, Mesh>();
//build array of vertices for one square
ppVertex[] vertexes = new ppVertex[4];
//vertexes[0].Position = new Vector3(-1f, -1f, 0.25f);
vertexes[0].Position = new Vector3(-1, -1, 1f);
vertexes[1].Position = new Vector3(-1, 1, 1f);
vertexes[2].Position = new Vector3(1, -1, 1f);
vertexes[3].Position = new Vector3(1, 1, 1f);
vertexes[0].TexCoords = new Vector2(0, 0);
vertexes[1].TexCoords = new Vector2(0, 1);
vertexes[2].TexCoords = new Vector2(1, 0);
vertexes[3].TexCoords = new Vector2(1, 1);
//build index array for the vertices to build a quad from two triangles
short[] indexes = { 0, 1, 2, 1, 3, 2 };
//create the data stream to push the vertex data into the buffer
DataStream vertices = new DataStream(Marshal.SizeOf(typeof(Vertex)) * 4, true, true);
//load the data stream
vertices.WriteRange(vertexes);
//reset the data position
vertices.Position = 0;
//create the data stream to push the index data into the buffer
DataStream indices = new DataStream(sizeof(short) * 6, true, true);
//load the data stream
indices.WriteRange(indexes);
//reset the data position
indices.Position = 0;
//create the mesh object
Mesh mesh = new Mesh();
//create the description of the vertex buffer
D3D.BufferDescription vbd = new BufferDescription();
vbd.BindFlags = D3D.BindFlags.VertexBuffer;
vbd.CpuAccessFlags = D3D.CpuAccessFlags.None;
vbd.OptionFlags = ResourceOptionFlags.None;
vbd.SizeInBytes = Marshal.SizeOf(typeof(Vertex)) * 4;
vbd.Usage = ResourceUsage.Default;
//create and assign the vertex buffer to the mesh, filling it with data
mesh.VertexBuffer = new D3D.Buffer(device, vertices, vbd);
//create the description of the index buffer
D3D.BufferDescription ibd = new BufferDescription();
ibd.BindFlags = D3D.BindFlags.IndexBuffer;
ibd.CpuAccessFlags = D3D.CpuAccessFlags.None;
ibd.OptionFlags = ResourceOptionFlags.None;
ibd.SizeInBytes = sizeof(short) * 6;
ibd.Usage = ResourceUsage.Default;
//create and assign the index buffer to the mesh, filling it with data
mesh.IndexBuffer = new D3D.Buffer(device, indices, ibd);
//get vertex and index counts
mesh.vertices = vertexes.GetLength(0);
mesh.indices = indexes.Length;
//close the data streams
indices.Close();
vertices.Close();
meshes.Add(new Vector3(0), mesh);
}
and when I render the square:
private void DrawScene()
{
lock (meshes)
{
foreach (Mesh mesh in meshes.Values)
{
if (mesh.indices > 0)
{
try
{
//if (camera.SphereInFrustum(mesh.BoundingSphere, sphereRadius))
//{
context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(mesh.VertexBuffer, Marshal.SizeOf(typeof(Vertex)), 0));
context.InputAssembler.SetIndexBuffer(mesh.IndexBuffer, Format.R16_UInt, 0);
context.DrawIndexed(mesh.indices, 0, 0);
//}
}
catch (Exception err)
{
MessageBox.Show(err.Message);
}
}
}
}
}
EDIT: I've added the vertex shader being run
cbuffer EveryFrame : register(cb0)
{
float3 diffuseColor : packoffset(c0);
float3 lightdir : packoffset(c1);
};
cbuffer EveryMotion : register(cb1)
{
float4x4 WorldViewProjection : packoffset(c0);
float4x4 LightWorldViewProjection : packoffset(c4);
};
struct VS_IN
{
float3 position : POSITION;
float3 normal : NORMAL;
float4 col : TEXCOORD;
};
struct PS_IN
{
float4 position : SV_POSITION;
float4 col : TEXCOORD;
float3 normal : NORMAL;
};
PS_IN VS(VS_IN input)
{
PS_IN output;
output.position = float4(input.position,1);
output.col = input.col;
output.normal = input.normal;
return output;
}
Here's PIX's vertex output.
PreVS:
PostVS:
And here's the dissassembly PIX generated when I chose to debug vertex 0
//
// Generated by Microsoft (R) HLSL Shader Compiler 9.29.952.3111
//
//
//
// Input signature:
//
// Name Index Mask Register SysValue Format Used
// ---------------- ----- ------ -------- -------- ------ ------
// POSITION 0 xyz 0 NONE float xyz
// NORMAL 0 xyz 1 NONE float xyz
// TEXCOORD 0 xyzw 2 NONE float
//
//
// Output signature:
//
// Name Index Mask Register SysValue Format Used
// ---------------- ----- ------ -------- -------- ------ ------
// SV_POSITION 0 xyzw 0 POS float xyzw
// TEXCOORD 0 xyzw 1 NONE float xyzw
// NORMAL 0 xyz 2 NONE float xyz
//
vs_4_0
dcl_input v0.xyz
dcl_input v1.xyz
dcl_output_siv o0.xyzw , position
dcl_output o1.xyzw
dcl_output o2.xyz
mov o0.xyz, v0.xyzx
mov o0.w, l(1.000000)
mov o1.xyzw, l(1.000000, 1.000000, 1.000000, 1.000000)
mov o2.xyz, v1.xyzx
ret
// Approximately 5 instruction slots used
I've also added the input assembler:
private void SetPPInputAssembler(Shader shader)
{
InputElement[] elements = new[] {
new InputElement("POSITION",0,Format.R32G32B32_Float,0),
new InputElement("NORMAL",0,Format.R32G32B32_Float,12,0),
new InputElement("TEXCOORD",0,Format.R32G32_Float,24,0),
};
InputLayout layout = new InputLayout(device, shader.InputSignature, elements);
context.InputAssembler.InputLayout = layout;
context.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
}
Obviously your vertex input positions don't match the values you want to give in.
For the first vertex the values look good until the z-coordinate of the texture coordinates.
You are defining a Vector2D in your program Vertex-struct, but a Vector4D in the Vertexshader Vertex-struct and things get mixed up.
just change VS_IN to this:
struct VS_IN
{
float3 position : POSITION;
float3 normal : NORMAL;
float2 col : TEXCOORD; // float2 instead of float4
};
I'm not sure though if you really want to have colors or rather texcoords. If you really want to have colors float4 would be right, but then you had to change
vertexes[0].TexCoords = new Vector2(0, 0);
into
vertexes[0].TexCoords = new Vector4(0, 0, 0, 0);
Either way, one of those variables is misnamed and probably the reason for the confusion.

Why do my OpenGL object get rendered with the same texture?

UPDATE : By the help of #datenwolf I know that the return value of gluBuild2DMipmaps is not the pointer to the texture, instead it's only an error code. I forgot to call glGenTextures and glBindTexture. Look in the method LoadTextureRaw in this answer
I have a problem when rendering multiple object, which each having their own Texture file definition, that is, they all draw the same texture. I create a class hierarchy, CDrawObject->CBall. In the CDrawObject, I define this :
public ref class CDrawObject
{
protected:
BYTE * dataTexture;
GLuint * texture;
public:
String ^ filename;
CDrawObject(void);
virtual void draw();
void LoadTextureRaw();
};
In the LoadTextureDraw(), I define this:
void CDrawObject::LoadTextureRaw()
{
//GLuint texture;
if(!filename) return;
if(filename->Equals("")) return;
texture = new GLuint;
System::Drawing::Bitmap ^ bitmap = gcnew Bitmap(filename);
int h = bitmap->Height;
int w = bitmap->Width;
int s = w * h;
dataTexture = new BYTE[s * 3];
System::Drawing::Rectangle rect = System::Drawing::Rectangle(0,0,w,h);
System::Drawing::Imaging::BitmapData ^ bitmapData =
bitmap->LockBits(rect,System::Drawing::Imaging::ImageLockMode::ReadWrite , System::Drawing::Imaging::PixelFormat::Format24bppRgb);
::memcpy(dataTexture,bitmapData->Scan0.ToPointer(),s*3);
/* old code
bitmap->UnlockBits(bitmapData);
pin_ptr<GLuint*> pt = &texture;//pin managed pointer, to be unmanaged
**pt = gluBuild2DMipmaps(GL_TEXTURE_2D, 3, w,h,GL_BGR_EXT, GL_UNSIGNED_BYTE, dataTexture);
*/
//new code : working fine this way. I forgot to call glGenTextures and glBindTexture
bitmap->UnlockBits(bitmapData);
pin_ptr<GLuint*> pt = &texture;//pin managed pointer, to be unmanaged... a must here :)
glEnable(GL_TEXTURE_2D);
glGenTextures(1,*pt);
glBindTexture(GL_TEXTURE_2D,**pt);
gluBuild2DMipmaps(GL_TEXTURE_2D, GL_RGB, w,h,GL_BGR_EXT, GL_UNSIGNED_BYTE, dataTexture);
}
And as the CBall:draw itself, I define this :
void CBall::draw(){
glLoadIdentity();
if(texture!=NULL && !filename->Equals(""))
{
glEnable(GL_TEXTURE_2D);
pin_ptr<GLuint*> pt = &texture;
glBindTexture(GL_TEXTURE_2D,**pt);
}
glEnable(GL_LIGHTING);
glEnable(GL_LIGHT0);
glShadeModel(GL_SMOOTH);
glEnable(GL_DEPTH_TEST);
glEnable(GL_NORMALIZE);
glTranslatef(this->x,this->y,this->z);
glRotatef(this->sudut_rotasi_x,1,0,0);
glRotatef(this->sudut_rotasi_y,0,1,0);
glRotatef(this->sudut_rotasi_z,0,0,1);
glScalef(this->x_scale,this->y_scale,this->z_scale);
GLUquadricObj *q = gluNewQuadric();
gluQuadricNormals(q, GL_SMOOTH);
gluQuadricTexture(q, GL_TRUE);
gluSphere(q, r, 32, 16);
glFlush();
glDisable(GL_TEXTURE_2D);
}
The problem is, when I draw two (or more) ball object, they all drawn using the same texture. I already debug the code, and for each object, they all have different texture variable. Here is a snapshot of my code that draw those balls :
...
CBall ^ ball = gcnew CBall();
ball->x=Convert::ToSingle(r->GetAttribute("x"));
ball->y=Convert::ToSingle(r->GetAttribute("y"));
ball->z=Convert::ToSingle(r->GetAttribute("z"));
ball->r=Convert::ToSingle(r->GetAttribute("r"));
ball->filename=r->GetAttribute("filename");
ball->LoadTextureRaw();
addGraphic(id, ball);
...
Those code were called from a read XML file method.
What did I do wrong with this OpenGL Code?
Your problem is, that gluBuild2DMipmaps doesn't return the texture name, but a error code. You need to create a texture name separately.
Try this:
public ref class CDrawObject
{
protected:
GLuint texture; // just a GLuint, not a pointer!
public:
String ^ filename;
CDrawObject(void);
virtual void draw();
void LoadTextureRaw();
};
Change LoadTextureRaw a bit:
void CDrawObject::LoadTextureRaw()
{
if(!filename)
return;
if(filename->Equals(""))
return;
System::Drawing::Bitmap ^ bitmap = gcnew Bitmap(filename);
int h = bitmap->Height;
int w = bitmap->Width;
int s = w * h;
System::Drawing::Rectangle rect = System::Drawing::Rectangle(0,0,w,h);
System::Drawing::Imaging::BitmapData ^ bitmapData =
bitmap->LockBits( rect,
System::Drawing::Imaging::ImageLockMode::ReadWrite,
System::Drawing::Imaging::PixelFormat::Format24bppRgb );
// This is the important part: We generate a texture name and...
glGenTextures(1, &texture); // this should not require a pin_ptr, after all were in the middle of a member function of the class, so the garbage collector will not kick in.
// ...bind it, causing creation of a (yet uninitialized) texture object
glBindTexture(GL_TEXTURE_2D, texture);
GLint error = gluBuild2DMipmaps(
GL_TEXTURE_2D,
GL_RGB, // this should be a valid OpenGL token, not the number of components!
w, h,
GL_BGR_EXT, GL_UNSIGNED_BYTE,
bitmapData->Scan0.ToPointer() );
bitmap->UnlockBits(bitmapData);
}
Finally draw (which I rearranged a little)
void CBall::draw(){
glLoadIdentity();
glEnable(GL_LIGHTING);
glEnable(GL_LIGHT0);
glShadeModel(GL_SMOOTH);
glEnable(GL_DEPTH_TEST);
glEnable(GL_NORMALIZE);
glTranslatef(this->x,this->y,this->z);
glRotatef(this->sudut_rotasi_x,1,0,0);
glRotatef(this->sudut_rotasi_y,0,1,0);
glRotatef(this->sudut_rotasi_z,0,0,1);
glScalef(this->x_scale,this->y_scale,this->z_scale);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, texture);
GLUquadricObj *q = gluNewQuadric();
gluQuadricNormals(q, GL_SMOOTH);
gluQuadricTexture(q, GL_TRUE);
gluSphere(q, r, 32, 16);
// glFlush is not required
glDisable(GL_TEXTURE_2D);
}

Resources