How to develop OpenGL ES (GLES) 2.0 applications on Linux? - linux

I would like to develop OpenGL ES 2.0 apps on my Ubuntu machine. I could not find any libraries/emulators that support GLES 2.0 yet. Any suggestions?

GLFW, Mesa, Ubuntu 16.04 AMD64
This was not easy to set up on Ubuntu 14.04, but now it just works.
sudo apt-get install libglfw3-dev libgles2-mesa-dev
gcc glfw_triangle.c -lGLESv2 -lglfw
Output: a red triangle on a black background.
glfw_triangle.c
#include <stdio.h>
#include <stdlib.h>
#define GLFW_INCLUDE_ES2
#include <GLFW/glfw3.h>
static const GLuint WIDTH = 800;
static const GLuint HEIGHT = 600;
static const GLchar* vertex_shader_source =
"#version 100\n"
"attribute vec3 position;\n"
"void main() {\n"
" gl_Position = vec4(position, 1.0);\n"
"}\n";
static const GLchar* fragment_shader_source =
"#version 100\n"
"void main() {\n"
" gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n"
"}\n";
static const GLfloat vertices[] = {
0.0f, 0.5f, 0.0f,
0.5f, -0.5f, 0.0f,
-0.5f, -0.5f, 0.0f,
};
GLint common_get_shader_program(const char *vertex_shader_source, const char *fragment_shader_source) {
enum Consts {INFOLOG_LEN = 512};
GLchar infoLog[INFOLOG_LEN];
GLint fragment_shader;
GLint shader_program;
GLint success;
GLint vertex_shader;
/* Vertex shader */
vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, 1, &vertex_shader_source, NULL);
glCompileShader(vertex_shader);
glGetShaderiv(vertex_shader, GL_COMPILE_STATUS, &success);
if (!success) {
glGetShaderInfoLog(vertex_shader, INFOLOG_LEN, NULL, infoLog);
printf("ERROR::SHADER::VERTEX::COMPILATION_FAILED\n%s\n", infoLog);
}
/* Fragment shader */
fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, 1, &fragment_shader_source, NULL);
glCompileShader(fragment_shader);
glGetShaderiv(fragment_shader, GL_COMPILE_STATUS, &success);
if (!success) {
glGetShaderInfoLog(fragment_shader, INFOLOG_LEN, NULL, infoLog);
printf("ERROR::SHADER::FRAGMENT::COMPILATION_FAILED\n%s\n", infoLog);
}
/* Link shaders */
shader_program = glCreateProgram();
glAttachShader(shader_program, vertex_shader);
glAttachShader(shader_program, fragment_shader);
glLinkProgram(shader_program);
glGetProgramiv(shader_program, GL_LINK_STATUS, &success);
if (!success) {
glGetProgramInfoLog(shader_program, INFOLOG_LEN, NULL, infoLog);
printf("ERROR::SHADER::PROGRAM::LINKING_FAILED\n%s\n", infoLog);
}
glDeleteShader(vertex_shader);
glDeleteShader(fragment_shader);
return shader_program;
}
int main(void) {
GLuint shader_program, vbo;
GLint pos;
GLFWwindow* window;
glfwInit();
glfwWindowHint(GLFW_CLIENT_API, GLFW_OPENGL_ES_API);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
window = glfwCreateWindow(WIDTH, HEIGHT, __FILE__, NULL, NULL);
glfwMakeContextCurrent(window);
printf("GL_VERSION : %s\n", glGetString(GL_VERSION) );
printf("GL_RENDERER : %s\n", glGetString(GL_RENDERER) );
shader_program = common_get_shader_program(vertex_shader_source, fragment_shader_source);
pos = glGetAttribLocation(shader_program, "position");
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glViewport(0, 0, WIDTH, HEIGHT);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(pos, 3, GL_FLOAT, GL_FALSE, 0, (GLvoid*)0);
glEnableVertexAttribArray(pos);
glBindBuffer(GL_ARRAY_BUFFER, 0);
while (!glfwWindowShouldClose(window)) {
glfwPollEvents();
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shader_program);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window);
}
glDeleteBuffers(1, &vbo);
glfwTerminate();
return EXIT_SUCCESS;
}
The key lines of code are:
#define GLFW_INCLUDE_ES2
#include <GLFW/glfw3.h>
GLFW_INCLUDE_ES2 is documented at: http://www.glfw.org/docs/latest/build_guide.html#build_macros and a quick look at the source shows that it forwards to GLES:
#elif defined(GLFW_INCLUDE_ES2)
#include <GLES2/gl2.h>
#if defined(GLFW_INCLUDE_GLEXT)
#include <GLES2/gl2ext.h>
#endif
This source seems to be written in the common subset of GLES and desktop OpenGL (as much of GLES is), so it also compiles with -lGL if we remove the #define GLFW_INCLUDE_ES2.
If we add anything that is not in GLES, such as immediate-mode rendering with glBegin, the link fails as expected.
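As a sketch of how the same file could target either API (USE_GLES below is a hypothetical macro of mine, not something GLFW defines), the top of the file could look like this:
/* Build for GLES:       gcc -DUSE_GLES glfw_triangle.c -lGLESv2 -lglfw */
/* Build for desktop GL: gcc glfw_triangle.c -lGL -lglfw                */
#ifdef USE_GLES
#define GLFW_INCLUDE_ES2   /* GLFW will include <GLES2/gl2.h> */
#endif                     /* otherwise GLFW includes the regular desktop GL header */
#include <GLFW/glfw3.h>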
See also: https://askubuntu.com/questions/244133/how-do-i-get-egl-and-opengles-libraries-for-ubuntu-running-on-virtualbox
Credits: genpfult made the code much more correct.
ARM Mali OpenGL ES SDK
download from: http://malideveloper.arm.com/resources/sdks/opengl-es-sdk-for-linux/
open the documentation HTML on a browser
follow the "Quick Start Guide", it's simple
Contains several interesting open source examples + windowing system boilerplate (X11 + EGL).
The build system supports easy cross compilation for ARM / Mali SoCs, but I haven't tested that yet.
The key component included seems to be the "OpenGL ES Emulator" http://malideveloper.arm.com/resources/tools/opengl-es-emulator/ which "maps OpenGL ES 3.2 API calls to the OpenGL API". But that does not ship with source, only precompiled.
Uses a custom enterprisey EULA that appears to be permissive, but yeah, ask your lawyer.
Tested on SDK v2.4.4.

Mesa supports it. If you want to restrict yourself to OpenGL ES only then you'll need to build it into a separate directory and then add the appropriate include and library directories.

Update:
You can (still) use PowerVR SDK and now it supports Vulkan as well. Updated links:
PowerVR SDK page: https://www.imgtec.com/developers/powervr-sdk-tools/powervr-sdk/
Installers download page: https://www.imgtec.com/developers/powervr-sdk-tools/installers/
Github repo: https://github.com/powervr-graphics/Native_SDK
At the time of my original answer, PowerVR SDK was the most complete solution (Mesa gained full OpenGL ES 2.0 support with its 3.1 release according to its Wikipedia page).
Now, Mesa and Mali SDK can also be a choice. For detailed info on those, please refer to this detailed answer by Ciro Santilli 冠状病毒审查六四事件法轮功
Original Answer:
You can use the PowerVR SDK to emulate OpenGL ES on your PC. You can download the SDK here. The archive includes a documentation file with the steps needed to install the emulation libraries, plus tutorials and demo applications with source code.

Develop against the OpenGL 2.0 standard and avoid immediate mode and the fixed-function pipeline; your program will then be essentially ES 2.0 compliant.
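As a rough sketch of the distinction (drawing from buffers through vertex attributes exists in both GL 2.0 and ES 2.0, while immediate mode does not exist in ES at all):
/* Portable across GL 2.0 and ES 2.0: vertex attributes sourced from a buffer. */
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(pos, 3, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(pos);
glDrawArrays(GL_TRIANGLES, 0, 3);
/* Not portable: immediate mode and the fixed-function matrix stack are missing from ES 2.0. */
/* glBegin(GL_TRIANGLES); ... glEnd();  glMatrixMode(GL_MODELVIEW); ... */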

You can generate a header that contains only the functions you really need, and with GLFW you can create an OpenGL ES context. That way you can't accidentally use functions you don't want, because they simply won't be declared. I found something that might help you with this:
GL Load from the Unofficial OpenGL SDK

Related

Relating vertex buffers and vertex attributes in openGL

My entire code that is supposed to draw a triangle on the screen is:
#include <iostream>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <string.h>
const GLint WIDTH=800, HEIGHT=600;
GLuint VAO, VBO, shader;
//Vertex Shader
/*static const char**/
const GLchar* vShader = "\n"
"\n"
"#version 330 \n"
"layout (location=0) in vec3 pos;\n"
"void main(){\n"
"gl_Position = vec4(pos.x,pos.y,pos.z,1.0);\n"
"\n"
"}\n"
"";
// fragment shader
const GLchar* fShader = "\n"
"#version 330 \n"
"out vec4 colour;\n"
"void main(){\n"
"colour = vec4(1.0, 0.0, 0.0, 1.0);\n"
"}\n"
"\n"
"\n";
void CreateTriangle(){
GLfloat vertices[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f
};
// vertex arrays
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
// vertex buffers
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER,VBO);
glBufferData(GL_ARRAY_BUFFER,sizeof(GLfloat)*9,vertices,GL_STATIC_DRAW);
glVertexAttribPointer(0,3, GL_FLOAT,GL_FALSE,0, 0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
}
void AddShader(GLuint theProgram, const GLchar* shaderCode, GLenum shaderType){
GLuint theShader = glCreateShader(shaderType);
const GLchar* theCode[1];
theCode[0] = shaderCode;
GLint codeLength[1];
codeLength[0] = strlen(shaderCode);
glShaderSource(theShader, 1, theCode, codeLength);
glCompileShader(theShader);
GLint result=0;
GLchar eLog[1024]={};
glGetShaderiv(theShader, GL_COMPILE_STATUS, &result);
if(!result){
glGetShaderInfoLog(theShader,sizeof(eLog),NULL, eLog);
std::cout<< "Error compiling"<<shaderType<<" "<<eLog <<std::endl;
return;
}
glAttachShader(theProgram,theShader);
}
void CompileShader(){
shader = glCreateProgram();
if(!shader){
std::cout<<"Error Creating Shader Program";
return;
}
AddShader(shader, vShader,GL_VERTEX_SHADER);
AddShader(shader, fShader,GL_FRAGMENT_SHADER);
// getting error codes
GLint result=0;
GLchar eLog[1024]={0};
// Creates the executables in the graphic card
glLinkProgram(shader);
// get information if program is linked properly
glGetProgramiv(shader, GL_LINK_STATUS, &result);
if(!result){
glGetProgramInfoLog(shader,sizeof(eLog),NULL,eLog);
std::cout<<"Error linking program"<<eLog<<std::endl;
return;
}
glValidateProgram(shader);
glGetProgramiv(shader,GL_VALIDATE_STATUS,&result);
if(!result){
glGetProgramInfoLog(shader, sizeof(eLog),NULL, eLog);
std::cout<<"Error validating program"<<eLog<<std::endl;
return;
}
}
int main(void){
if(!glfwInit()){
std::cout << "glfw initialization failed" << std::endl;
glfwTerminate();
return 1;
}
// glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR,3);
// glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR,3);
// glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
GLFWwindow *mainWindow = glfwCreateWindow(WIDTH, HEIGHT, "NEW WINDOW", NULL, NULL);
if(!mainWindow){
std::cout<< "Window creation failed" <<std::endl;
glfwTerminate();
return 1;
}
int bufferWidth, bufferHeight;
glfwGetFramebufferSize(mainWindow, &bufferWidth, &bufferHeight);
glfwMakeContextCurrent(mainWindow);
if(glewInit() != GLEW_OK){
std::cout << "GLEW Initialization failed" << std::endl;
glfwDestroyWindow(mainWindow);
glfwTerminate();
return 1;
}
glViewport(0,0,bufferWidth, bufferHeight);
CreateTriangle();
CompileShader();
while(!glfwWindowShouldClose(mainWindow)){
glfwPollEvents();
glUseProgram(shader);
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES,0,3);
glBindVertexArray(0);
glUseProgram(0);
glfwSwapBuffers(mainWindow);
std::cout<<"something"<<std::endl;
}
return 0;
}
It essentially draws a black screen with no error whatsoever, but it is supposed to draw a red triangle, so I'm trying to debug this code, and there are some parts of it that I don't understand.
1) How does the VBO (Vertex Buffer Object) relate to the VAO (Vertex Array Object)? We basically defined these using the following inside the CreateTriangle() function:
...
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
// vertex buffers
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER,VBO);
glBufferData(GL_ARRAY_BUFFER,sizeof(GLfloat)*9,vertices,GL_STATIC_DRAW);
glVertexAttribPointer(0,3, GL_FLOAT,GL_FALSE,0, 0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
...
Note that we unbind both the VAO and the VBO at the end, yet during the drawing calls inside the while loop:
while(!glfwWindowShouldClose(mainWindow)){
glfwPollEvents();
glUseProgram(shader);
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES,0,3);
glBindVertexArray(0);
glUseProgram(0);
glfwSwapBuffers(mainWindow);
std::cout<<"something"<<std::endl;
}
we only rebind the VAO and not the VBO, which I thought might be the cause of the error, but I don't know for sure.
Also, the tutorial says that the VBO is bound inside the VAO, but I don't see any call in the code that links the VBO to the VAO, so I'm confused about how they are bound together, and why we only rebind the VAO, and not the VBO, during the drawing stage.
I'm using Linux OS and used the following to compile:
g++ -std=c++17 main.cpp -o main -lGL -lGLEW -lglfw && ./main
From the comments above, credit to @Rabbid76:
When glVertexAttribPointer is called, a reference to the currently bound VBO is stored in the currently bound VAO. The current VAO is bound by glBindVertexArray(VAO); and the current VBO is bound by glBindBuffer(GL_ARRAY_BUFFER, VBO);. glVertexAttribPointer associates the VBO with attribute index 0 in the VAO, and this association is stored in the VAO's state vector. It is therefore sufficient to bind the VAO (glBindVertexArray(VAO)) before the draw call; the VBO does not need to be rebound.
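A minimal annotated sketch of the same calls used in the question, showing where the association is recorded:
glBindVertexArray(VAO);                 /* make VAO the current vertex array object */
glBindBuffer(GL_ARRAY_BUFFER, VBO);     /* make VBO the current array buffer */
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0); /* VAO now records: attribute 0 reads from VBO */
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);       /* safe: the association is already stored in VAO */
glBindVertexArray(0);
/* At draw time, binding the VAO restores everything, including the VBO link: */
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);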
As for the problem stated in the comments regarding the black screen, try updating GLEW and your graphics card driver. If updating GLEW is not possible, set glewExperimental = GL_TRUE before calling glewInit().
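For reference, a minimal sketch of where that flag goes; it must be set after the context is current and before glewInit():
glfwMakeContextCurrent(mainWindow);
glewExperimental = GL_TRUE;      /* expose core-profile entry points on some driver/GLEW combinations */
if (glewInit() != GLEW_OK) {
    /* handle the error as in the existing code */
}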

OpenGL Without GUI

Let's say I run a Linux and I have no desktop environment installed. I boot up my system and all I have is my shell.
Is it possible to compile a program that uses the OpenGL libraries or directly uses the GPU driver to draw to the screen?
As far as I understand, I would always need some kind of desktop environment to provide a window that I can draw on. To keep it simple, let's say I just want to draw a simple 2D shape, like a triangle in the middle of the screen.
And if that's possible, how can I do it, and where can I read more about the topic? If I can draw directly over my terminal, does this mean I would also be able to run my app on a system that has a desktop environment and still see my triangle?
Is it possible to compile a program that uses the OpenGL libraries or directly uses the GPU driver to draw to the screen?
Yes. With the EGL API this has been formalized; it works best with NVIDIA GPUs and their proprietary drivers. NVIDIA describes it on their dev blog: https://devblogs.nvidia.com/egl-eye-opengl-visualization-without-x-server/
Essentially the steps are:
Create an OpenGL context for a PBuffer:
#include <EGL/egl.h>
static const EGLint configAttribs[] = {
EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_DEPTH_SIZE, 8,
EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
EGL_NONE
};
static const int pbufferWidth = 9;
static const int pbufferHeight = 9;
static const EGLint pbufferAttribs[] = {
EGL_WIDTH, pbufferWidth,
EGL_HEIGHT, pbufferHeight,
EGL_NONE,
};
int main(int argc, char *argv[])
{
// 1. Initialize EGL
EGLDisplay eglDpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
EGLint major, minor;
eglInitialize(eglDpy, &major, &minor);
// 2. Select an appropriate configuration
EGLint numConfigs;
EGLConfig eglCfg;
eglChooseConfig(eglDpy, configAttribs, &eglCfg, 1, &numConfigs);
// 3. Create a surface
EGLSurface eglSurf = eglCreatePbufferSurface(eglDpy, eglCfg,
pbufferAttribs);
// 4. Bind the API
eglBindAPI(EGL_OPENGL_API);
// 5. Create a context and make it current
EGLContext eglCtx = eglCreateContext(eglDpy, eglCfg, EGL_NO_CONTEXT,
NULL);
eglMakeCurrent(eglDpy, eglSurf, eglSurf, eglCtx);
// from now on use your OpenGL context
// 6. Terminate EGL when finished
eglTerminate(eglDpy);
return 0;
}
Then go about the rest as usual. You can even ditch the PBuffer completely and use only OpenGL-managed resources, i.e. render to framebuffer objects. To that end you can omit creating the surface and just make the context current without one.
Here's an example for using EGL without display, no EGL surface, with OpenGL managed framebuffer.
#include <GL/glew.h>
#include <GL/glut.h>
#include <EGL/egl.h>
#include <unistd.h>
#include <stdlib.h>
#include <assert.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <math.h>
#include <stdio.h>
using namespace std;
namespace render
{
int width, height;
float aspect;
void init();
void display();
int const fbo_width = 512;
int const fbo_height = 512;
GLuint fb, color, depth;
void *dumpbuf;
int dumpbuf_fd;
};
static const EGLint configAttribs[] = {
EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_DEPTH_SIZE, 8,
EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
EGL_NONE
};
int main(int argc, char *argv[])
{
// 1. Initialize EGL
EGLDisplay eglDpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
EGLint major, minor;
eglInitialize(eglDpy, &major, &minor);
// 2. Select an appropriate configuration
EGLint numConfigs;
EGLConfig eglCfg;
eglChooseConfig(eglDpy, configAttribs, &eglCfg, 1, &numConfigs);
// 3. Bind the API
eglBindAPI(EGL_OPENGL_API);
// 3. Create a context and make it current
EGLContext eglCtx = eglCreateContext(eglDpy, eglCfg, EGL_NO_CONTEXT,
NULL);
eglMakeCurrent(eglDpy, EGL_NO_SURFACE, EGL_NO_SURFACE, eglCtx);
glewInit();
// from now on use your OpenGL context
render::init();
render::display();
// 4. Terminate EGL when finished
eglTerminate(eglDpy);
return 0;
}
void CHECK_FRAMEBUFFER_STATUS()
{
GLenum status;
status = glCheckFramebufferStatus(GL_DRAW_FRAMEBUFFER);
switch(status) {
case GL_FRAMEBUFFER_COMPLETE:
break;
case GL_FRAMEBUFFER_UNSUPPORTED:
/* choose different formats */
break;
default:
/* programming error; will fail on all hardware */
throw "Framebuffer Error";
}
}
namespace render
{
float const light_dir[]={1,1,1,0};
float const light_color[]={1,0.95,0.9,1};
void init()
{
glGenFramebuffers(1, &fb);
glGenTextures(1, &color);
glGenRenderbuffers(1, &depth);
glBindFramebuffer(GL_FRAMEBUFFER, fb);
glBindTexture(GL_TEXTURE_2D, color);
glTexImage2D( GL_TEXTURE_2D,
0,
GL_RGB8,
fbo_width, fbo_height,
0,
GL_RGBA,
GL_UNSIGNED_BYTE,
NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, color, 0);
glBindRenderbuffer(GL_RENDERBUFFER, depth);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, fbo_width, fbo_height);
glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depth);
GLint red_bits, green_bits, blue_bits, alpha_bits;
glGetIntegerv(GL_RED_BITS, &red_bits);
glGetIntegerv(GL_GREEN_BITS, &green_bits);
glGetIntegerv(GL_BLUE_BITS, &blue_bits);
glGetIntegerv(GL_ALPHA_BITS, &alpha_bits);
fprintf(stderr, "FBO format R%dG%dB%dA%d\n",
(int)red_bits,
(int)green_bits,
(int)blue_bits,
(int)alpha_bits );
CHECK_FRAMEBUFFER_STATUS();
dumpbuf_fd = open("/tmp/fbodump.rgb", O_CREAT|O_SYNC|O_RDWR, S_IRUSR|S_IWUSR);
assert(-1 != dumpbuf_fd);
dumpbuf = malloc(fbo_width*fbo_height*3);
assert(dumpbuf);
}
void display()
{
static float a=0, b=0, c=0;
glBindTexture(GL_TEXTURE_2D, 0);
glEnable(GL_TEXTURE_2D);
glBindFramebuffer(GL_FRAMEBUFFER, fb);
glViewport(0,0,fbo_width, fbo_height);
glClearColor(0,0,0,0);
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(-1, 1, -1, 1, -1, 1);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glBegin(GL_TRIANGLES);
glColor3f(1,0,0);
glVertex3f(1,0,0);
glColor3f(0,1,0);
glVertex3f(0,1,0);
glColor3f(0,0,1);
glVertex3f(0,0,1);
glEnd();
glReadBuffer(GL_COLOR_ATTACHMENT0);
glReadPixels(0,0,fbo_width,fbo_height,GL_RGB,GL_UNSIGNED_BYTE,dumpbuf);
lseek(dumpbuf_fd, 0, SEEK_SET);
write(dumpbuf_fd, dumpbuf, fbo_width*fbo_height*3);
}
}

glUseProgram() GL_INVALID_OPERATION 1282 on Ubuntu Gnome 17.04 Intel HD4000

I'm trying to learn OpenGL 3.0 by following the example on https://open.gl/drawing (it just draws a coloured rectangle on the screen).
I am using SDL2.0.5 and glew 2.0.0 with VS2015 community and it works perfectly on my i5 skylake HD4000 laptop running Win7 pro x64. However when I compile the exact same source on Ubuntu Gnome 17.04 x64 on the same laptop (dual booting Win and Linux) with gcc and glew 2.0.0, I get a GL_INVALID_OPERATION 1282 error at glUseProgram(shaderProgram).
Compiler runs with no warnings:
g++ main.cpp -Wall -I/usr/include/SDL2 -lGL -lGLEW -lSDL2 -lSDL2_image -lSDL2_mixer -o game
There are no errors before this function (I removed the error-checking code for clarity).
The screen also goes from the normal desktop to a blank black screen, and this repeats until I quit the app. The screen blanking does not happen if I comment out SDL_GL_SwapWindow(gameWindow), but the error is still there.
I have tried changing the context to 3.3 and shader versions to #version 330 core - same problem.
I also used the Intel Graphics Update Tool to get the latest drivers.
Some system info below:
product: Intel(R) Core(TM) i5-3340M CPU @ 2.70GHz
vendor: Intel Corp.
physical id: 1
bus info: cpu#0
size: 3199MHz
capacity: 3400MHz
width: 64 bits
capabilities: fpu fpu_exception wp vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx rdtscp x86-64 constant_tsc arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 cx16 xtpr pdcm pcid sse4_1 sse4_2 x2apic popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm epb tpr_shadow vnmi flexpriority ept vpid fsgsbase smep erms xsaveopt dtherm ida arat pln pts cpufreq
description: VGA compatible controller
product: 3rd Gen Core processor Graphics Controller
vendor: Intel Corporation
physical id: 2
bus info: pci#0000:00:02.0
version: 09
width: 64 bits
clock: 33MHz
capabilities: msi pm vga_controller bus_master cap_list rom
configuration: driver=i915 latency=0
resources: irq:29 memory:f6400000-f67fffff memory:e0000000-efffffff ioport:f000(size=64) memory:c0000-dffff
OpenGL vendor string: Intel Open Source Technology Center
OpenGL renderer string: Mesa DRI Intel(R) Ivybridge Mobile
OpenGL core profile version string: 3.3 (Core Profile) Mesa 17.0.3
OpenGL core profile shading language version string: 3.30
OpenGL core profile context flags: (none)
OpenGL core profile profile mask: core profile
OpenGL core profile extensions:
OpenGL version string: 3.0 Mesa 17.0.3
OpenGL shading language version string: 1.30
OpenGL context flags: (none)
OpenGL extensions:
OpenGL ES profile version string: OpenGL ES 3.0 Mesa 17.0.3
OpenGL ES profile shading language version string: OpenGL ES GLSL ES 3.00
OpenGL ES profile extensions:
Source code:
#define GLEW_STATIC
#include <GL/glew.h>
#include <SDL.h>
#include <SDL_opengl.h>
#include <iostream>
// SDL2 global pointers
SDL_Window* gameWindow = NULL;
SDL_GLContext context = NULL;
SDL_Surface* screenSurface = NULL;
SDL_Renderer* gameRenderer = NULL;
// Shader sources
const GLchar* vertexSource = R"glsl(
#version 150 core
in vec2 position;
in vec3 color;
out vec3 Color;
void main()
{
Color = color;
gl_Position = vec4(position, 0.0, 1.0);
}
)glsl";
const GLchar* fragmentSource = R"glsl(
#version 150 core
in vec3 Color;
out vec4 outColor;
void main()
{
outColor = vec4(Color, 1.0);
}
)glsl";
int main(int argc, char *argv[])
{
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
SDL_GL_SetAttribute(SDL_GL_STENCIL_SIZE, 8);
if(SDL_Init(SDL_INIT_EVERYTHING) != 0)
{
printf("SDL_Init failed! Error: %s\n", SDL_GetError());
return -1;
}
else printf("SDL_Init OK.\n");
gameWindow = SDL_CreateWindow("OpenGL", 100, 100, 400, 300, SDL_WINDOW_OPENGL);
if (gameWindow == NULL)
{
printf("SDL_CreateWindow failed! Error: %s\n", SDL_GetError());
SDL_Delay(1000);
SDL_Quit();
return -1;
}
else printf("SDL_CreateWindow OK.\n");
context = SDL_GL_CreateContext(gameWindow);
if(context == NULL)
{
printf("SDL_CreateWindow failed! Error: %s\n", SDL_GetError());
SDL_Delay(1000);
SDL_DestroyWindow(gameWindow);
SDL_Quit();
return -1;
}
else printf("SDL_GL_CreateContext OK.\n");
glewExperimental=GL_TRUE;
GLenum glew_init_error = glewInit();
if (GLEW_OK != glew_init_error)
{
fprintf(stderr, "glewInit failed! Error: %s\n", glewGetErrorString(glew_init_error));
SDL_Delay(1000);
SDL_GL_DeleteContext(context);
SDL_DestroyWindow(gameWindow);
SDL_Quit();
return -1;
}
else fprintf(stdout, "glewInit OK. Version: %s\n", glewGetString(GLEW_VERSION));
// Create Vertex Array Object
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
// Create a Vertex Buffer Object and copy the vertex data to it
GLuint vbo;
glGenBuffers(1, &vbo);
GLfloat vertices[] = {
-0.5f, 0.5f, 1.0f, 0.0f, 0.0f, // Top-left
0.5f, 0.5f, 0.0f, 1.0f, 0.0f, // Top-right
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, // Bottom-right
-0.5f, -0.5f, 1.0f, 1.0f, 1.0f // Bottom-left
};
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
// Create an element array
GLuint ebo;
glGenBuffers(1, &ebo);
GLuint elements[] = {
0, 1, 2,
2, 3, 0
};
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ebo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(elements), elements, GL_STATIC_DRAW);
// Create and compile the vertex shader
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexSource, NULL);
glCompileShader(vertexShader);
// Create and compile the fragment shader
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentSource, NULL);
glCompileShader(fragmentShader);
// Link the vertex and fragment shader into a shader program
GLuint shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glBindFragDataLocation(shaderProgram, 0, "outColor");
glLinkProgram(shaderProgram);
// ************** THE NEXT FUNCTION CALL FAILS WITH GL_INVALID_OPERATION *********************************
glUseProgram(shaderProgram);
// Specify the layout of the vertex data
GLint posAttrib = glGetAttribLocation(shaderProgram, "position");
glEnableVertexAttribArray(posAttrib);
glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat), 0);
GLint colAttrib = glGetAttribLocation(shaderProgram, "color");
glEnableVertexAttribArray(colAttrib);
glVertexAttribPointer(colAttrib, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat), (void*)(2 * sizeof(GLfloat)));
SDL_Event windowEvent;
while (true)
{
if (SDL_PollEvent(&windowEvent))
{
if (windowEvent.type == SDL_QUIT) break;
if (windowEvent.type == SDL_KEYUP && windowEvent.key.keysym.sym == SDLK_ESCAPE) break;
}
// Clear the screen to black
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
// Draw a rectangle from the 2 triangles using 6 indices
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
SDL_GL_SwapWindow(gameWindow);
}
glDeleteProgram(shaderProgram);
glDeleteShader(fragmentShader);
glDeleteShader(vertexShader);
glDeleteBuffers(1, &ebo);
glDeleteBuffers(1, &vbo);
glDeleteVertexArrays(1, &vao);
SDL_GL_DeleteContext(context);
SDL_Delay(1000);
SDL_Quit();
return 0;
}
Check whether the shaders compiled and linked successfully with glGetShaderiv/InfoLog and glGetProgramiv/InfoLog – pleluron
Thanks to pleluron I added shader and linker error-log code and found that, on my Core i5 HD4000 Linux machine, the GPU driver did not support the GLSL 1.50 (#version 150) shaders I was using. When I changed the vertex and fragment shader versions to 1.30 (#version 130) and the SDL major/minor context attributes to 3.0, it cured the problem (although I did have to modify the shader code for GL 3.0 / GLSL 1.30).
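The check pleluron suggested looks roughly like this (a minimal sketch reusing the variable names from the question):
GLint status = GL_FALSE;
GLchar log[1024];
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &status);
if (status != GL_TRUE) {
    glGetShaderInfoLog(vertexShader, sizeof(log), NULL, log);
    printf("vertex shader compile error: %s\n", log);
}
glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &status);
if (status != GL_TRUE) {
    glGetShaderInfoLog(fragmentShader, sizeof(log), NULL, log);
    printf("fragment shader compile error: %s\n", log);
}
glGetProgramiv(shaderProgram, GL_LINK_STATUS, &status);
if (status != GL_TRUE) {
    glGetProgramInfoLog(shaderProgram, sizeof(log), NULL, log);
    printf("program link error: %s\n", log);
}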
What confuses me is that glxinfo reports OpenGL 3.3 and GLSL 3.30 below, so I expected to be able to use precisely that:
OpenGL vendor string: Intel Open Source Technology Center
OpenGL renderer string: Mesa DRI Intel(R) Ivybridge Mobile
OpenGL core profile version string: 3.3 (Core Profile) Mesa 17.0.3
OpenGL core profile shading language version string: 3.30
OpenGL core profile context flags: (none)
OpenGL core profile profile mask: core profile
OpenGL core profile extensions:
... but it also reports OpenGL 3.0 and GLSL 1.30 below, which seems to be what is actually used when the context is created through SDL, despite a higher version being requested:
OpenGL version string: 3.0 Mesa 17.0.3
OpenGL shading language version string: 1.30
OpenGL context flags: (none)
I will dig into this and update this answer.
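One way to dig into it is to print what the created context actually reports, right after glewInit() (a small sketch; SDL_GL_GetAttribute reads back what SDL negotiated):
int major = 0, minor = 0;
SDL_GL_GetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, &major);
SDL_GL_GetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, &minor);
printf("SDL context version : %d.%d\n", major, minor);
printf("GL_VERSION           : %s\n", glGetString(GL_VERSION));
printf("GLSL version         : %s\n", glGetString(GL_SHADING_LANGUAGE_VERSION));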

Attach GLX contexts with different visuals to the same X Window subsequently

I'm trying to create a GLX context, attach it to an X Window, detach and destroy it again, then create another GLX context with a different Visual and attach it to the same window.
#include <GL/glx.h>
#include <X11/Xlib.h>
#include <stdlib.h>
#include <stdio.h>
// Descriptions for the visuals to try - if both are equal, the example works
static int attr_sets[][3] = {
{ GLX_RGBA, GLX_DOUBLEBUFFER, None },
{ GLX_RGBA, None }
};
Display *dpy;
XVisualInfo *vi;
GLXContext cxt;
Window wnd;
size_t i;
void fail(const char *m) { fprintf(stderr, "fail: %s #%lu\n", m, i+1); abort(); }
int main(void) {
dpy = XOpenDisplay(NULL);
wnd = XCreateSimpleWindow(dpy, RootWindow(dpy, 0), 0, 0, 1, 1, 1, 0, 0);
for (i = 0; i < 2; ++i) {
if (!(vi = glXChooseVisual(dpy, 0, attr_sets[i]))) fail("choose");
if (!(cxt = glXCreateContext(dpy, vi, None, True))) fail("create");
XFree(vi);
if (!glXMakeCurrent(dpy, wnd, cxt)) fail("attach");
if (!glXMakeCurrent(dpy, wnd, 0)) fail("detach");
glXDestroyContext(dpy, cxt);
}
XDestroyWindow(dpy, wnd);
XCloseDisplay(dpy);
return 0;
}
This example works on Mesa 10.5.2 with Intel graphics but fails on AMD fglrx 12.104 when the second context is attached (fail: attach #2).
What is the reason for this error? Is this forbidden by specification or is it a driver error?
If you look at the definition of XCreateSimpleWindow you'll see that it's actually just a wrapper around XCreateWindow, and XCreateWindow in turn uses the visual of its parent.
Now X11 visuals are only half the story. When you attach an OpenGL context to a Drawable for the first time, the visual (and, for the more advanced features, also the FBConfig) of that Drawable may become refined, so that later on only OpenGL contexts compatible with that configuration can be attached.
In short, once a Drawable's Visual/FBConfig has been pinned down, only OpenGL contexts compatible with it can be attached. See the errors defined for glXMakeCurrent, notably:
BadMatch is generated if drawable was not created with the same X
screen and visual as ctx. It is also generated if drawable is None and
ctx is not NULL.
Normally, when using GLX, you'd use glXCreateWindow to create an OpenGL-exclusive subwindow inside your main window, whose Visual/FBConfig you can set without affecting the main window.
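A rough sketch of that approach, assuming you have already picked a GLXFBConfig fbc with the attributes you need (error handling omitted; dpy and wnd are the display and parent window from the question):
/* Create a child window whose visual matches the FBConfig, then attach GL to it. */
XVisualInfo *vi = glXGetVisualFromFBConfig(dpy, fbc);
XSetWindowAttributes swa;
swa.colormap = XCreateColormap(dpy, wnd, vi->visual, AllocNone);
swa.border_pixel = 0;
Window sub = XCreateWindow(dpy, wnd, 0, 0, 640, 480, 0,
                           vi->depth, InputOutput, vi->visual,
                           CWColormap | CWBorderPixel, &swa);
XMapWindow(dpy, sub);
GLXWindow glxwin = glXCreateWindow(dpy, fbc, sub, NULL);
GLXContext ctx = glXCreateNewContext(dpy, fbc, GLX_RGBA_TYPE, NULL, True);
glXMakeContextCurrent(dpy, glxwin, glxwin, ctx);
XFree(vi);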

Any GLES examples, in C++, on x86 Linux?

I'm looking for a good source of GLES2 samples for C++ (or C) on x86 Linux with Xorg.
The samples I can find are all in Objective C for iOS, or Java for Android, or JavaScript for WebGL.
The Khronos web site has a "tutorials" section that contains two lines saying "our tutorials index will go here." Given that GLES 2 is 5 years old, I don't have much hope for a sudden surge of content there.
I already know OpenGL pretty well. I'd just like some convenient source for copy-and-paste context set-up code, really. Where can I find something like that?
Mesa demos!
http://cgit.freedesktop.org/mesa/demos
http://cgit.freedesktop.org/mesa/demos/tree/src/egl/opengles2
http://cgit.freedesktop.org/mesa/demos/tree/src/egl/opengles2/es2tri.c
GLFW, Mesa, Ubuntu 16.04 AMD64
I'm not sure if GLUT supports GLES, but GLFW does, greatly simplifying window management.
sudo apt-get install libglfw3-dev libgles2-mesa-dev
gcc glfw_triangle.c -lGLESv2 -lglfw
Output: a red triangle on a black background.
Source:
#include <stdio.h>
#include <stdlib.h>
#define GLFW_INCLUDE_ES2
#include <GLFW/glfw3.h>
static const GLuint WIDTH = 800;
static const GLuint HEIGHT = 600;
static const GLchar* vertex_shader_source =
"#version 100\n"
"attribute vec3 position;\n"
"void main() {\n"
" gl_Position = vec4(position, 1.0);\n"
"}\n";
static const GLchar* fragment_shader_source =
"#version 100\n"
"void main() {\n"
" gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n"
"}\n";
static const GLfloat vertices[] = {
0.0f, 0.5f, 0.0f,
0.5f, -0.5f, 0.0f,
-0.5f, -0.5f, 0.0f,
};
GLint common_get_shader_program(const char *vertex_shader_source, const char *fragment_shader_source) {
enum Consts {INFOLOG_LEN = 512};
GLchar infoLog[INFOLOG_LEN];
GLint fragment_shader;
GLint shader_program;
GLint success;
GLint vertex_shader;
/* Vertex shader */
vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, 1, &vertex_shader_source, NULL);
glCompileShader(vertex_shader);
glGetShaderiv(vertex_shader, GL_COMPILE_STATUS, &success);
if (!success) {
glGetShaderInfoLog(vertex_shader, INFOLOG_LEN, NULL, infoLog);
printf("ERROR::SHADER::VERTEX::COMPILATION_FAILED\n%s\n", infoLog);
}
/* Fragment shader */
fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, 1, &fragment_shader_source, NULL);
glCompileShader(fragment_shader);
glGetShaderiv(fragment_shader, GL_COMPILE_STATUS, &success);
if (!success) {
glGetShaderInfoLog(fragment_shader, INFOLOG_LEN, NULL, infoLog);
printf("ERROR::SHADER::FRAGMENT::COMPILATION_FAILED\n%s\n", infoLog);
}
/* Link shaders */
shader_program = glCreateProgram();
glAttachShader(shader_program, vertex_shader);
glAttachShader(shader_program, fragment_shader);
glLinkProgram(shader_program);
glGetProgramiv(shader_program, GL_LINK_STATUS, &success);
if (!success) {
glGetProgramInfoLog(shader_program, INFOLOG_LEN, NULL, infoLog);
printf("ERROR::SHADER::PROGRAM::LINKING_FAILED\n%s\n", infoLog);
}
glDeleteShader(vertex_shader);
glDeleteShader(fragment_shader);
return shader_program;
}
int main(void) {
GLuint shader_program, vbo;
GLint pos;
GLFWwindow* window;
glfwInit();
glfwWindowHint(GLFW_CLIENT_API, GLFW_OPENGL_ES_API);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
window = glfwCreateWindow(WIDTH, HEIGHT, __FILE__, NULL, NULL);
glfwMakeContextCurrent(window);
printf("GL_VERSION : %s\n", glGetString(GL_VERSION) );
printf("GL_RENDERER : %s\n", glGetString(GL_RENDERER) );
shader_program = common_get_shader_program(vertex_shader_source, fragment_shader_source);
pos = glGetAttribLocation(shader_program, "position");
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glViewport(0, 0, WIDTH, HEIGHT);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(pos, 3, GL_FLOAT, GL_FALSE, 0, (GLvoid*)0);
glEnableVertexAttribArray(pos);
glBindBuffer(GL_ARRAY_BUFFER, 0);
while (!glfwWindowShouldClose(window)) {
glfwPollEvents();
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shader_program);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window);
}
glDeleteBuffers(1, &vbo);
glfwTerminate();
return EXIT_SUCCESS;
}
The key lines of code are:
#define GLFW_INCLUDE_ES2
#include <GLFW/glfw3.h>
GLFW_INCLUDE_ES2 is documented at: http://www.glfw.org/docs/latest/build_guide.html#build_macros and a quick look at the source shows that it forwards to GLES:
#elif defined(GLFW_INCLUDE_ES2)
#include <GLES2/gl2.h>
#if defined(GLFW_INCLUDE_GLEXT)
#include <GLES2/gl2ext.h>
#endif
This source seems to be written in the common subset of GLES and desktop OpenGL (as much of GLES is), so it also compiles with -lGL if we remove the #define GLFW_INCLUDE_ES2.
If we add anything that is not in GLES, such as immediate-mode rendering with glBegin, the link fails as expected.
See also: How to develop OpenGL ES (GLES) 2.0 applications on Linux?
Credits: genpfult made the code much more correct.
ARM Mali OpenGL ES SDK
download from: http://malideveloper.arm.com/resources/sdks/opengl-es-sdk-for-linux/
open the documentation HTML on a browser
follow the "Quick Start Guide", it's simple
Contains several interesting open source examples + windowing system boilerplate (X11 + EGL).
The build system supports easy cross compilation for ARM / Mali SoCs, but I haven't tested that yet.
The key component included seems to be the "OpenGL ES Emulator" http://malideveloper.arm.com/resources/tools/opengl-es-emulator/ which "maps OpenGL ES 3.2 API calls to the OpenGL API". But that does not ship with source, only precompiled.
Uses a custom enterprisey EULA that appears to be permissive, but yeah, ask your lawyer.
Tested on SDK v2.4.4.
