Trouble creating EGL/GLESv2 window on Linux

combolek wrote on Friday, June 21, 2013:

Hi,

I am using GLFW from master 3be3f58a92 compiled for EGL/GLES2 (with GLFW_USE_EGL=ON and GLFW_CLIENT_LIBRARY=glesv2) on Linux/X11 (Fedora 18).

I have a short test program that tries to open a window. It works correctly when compiled for full OpenGL (FULL_GL defined, linked against a regular GL/GLX build of GLFW), but it fails in glfwCreateWindow() when compiled for EGL/GLES2, with the following X error:

X Error of failed request:  BadMatch (invalid parameter attributes)
  Major opcode of failed request:  72 (X_PutImage)
  Serial number of failed request:  90
  Current serial number in output stream:  94
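
One way to narrow down where the error comes from is to make the X connection synchronous, so the failing request is reported at the call that issued it rather than later in the output stream. A minimal sketch, assuming glfw3native.h is usable in this build (in this version it wants both a window API and a context API macro defined):

/* Debugging sketch: a synchronous X connection reports errors at the
   call that caused them instead of later in the output stream.
   glfwGetX11Display() comes from GLFW's native access header. */
#define GLFW_EXPOSE_NATIVE_X11 1
#define GLFW_EXPOSE_NATIVE_EGL 1
#include <GLFW/glfw3native.h>

/* ... after glfwInit(): */
XSynchronize(glfwGetX11Display(), True);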

Interestingly, I get the same error on both AMD and Nvidia hardware, but the test works fine on Intel Ivy Bridge!

The test program is pasted below. Does anyone know what I am missing here?

Many thanks,

Bolek

#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>
#include <math.h>
#if defined(FULL_GL)
#include <GL/gl.h>
#else
#include <GLES2/gl2.h>
#endif
#define GLFW_INCLUDE_NONE       1
#include <GLFW/glfw3.h>

typedef struct
{
  bool paused;
} UserData;

static void Key(GLFWwindow* window, int key, int scancode, int action, int mods)
{
  if (action != GLFW_PRESS) {
    return;
  }
  UserData *user_data = (UserData *)glfwGetWindowUserPointer(window);
  switch (key) {
  case GLFW_KEY_ESCAPE:
    glfwSetWindowShouldClose(window, GL_TRUE);
    return;
  case GLFW_KEY_SPACE:
    user_data->paused = !user_data->paused;
    break;
  default:
    break;
  }
}

static void Reshape(GLFWwindow* window, int width, int height)
{
  printf("Reshape width = %d  height = %d\n", width, height);
  glViewport(0, 0, (GLsizei)width, (GLsizei)height);
}

static void Draw(GLFWwindow *window)
{
  glClearColor(0.0, (sin(glfwGetTime() * 4.0) + 1.0) / 2.0, 0.0, 0.0);
  glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
  glfwSwapBuffers(window);
}

static void Error(int err, const char *msg)
{
  printf("GLFW Error 0x%x: %s\n", err, msg);
  exit(EXIT_FAILURE);
}

int main(int argc, char **argv)
{
  UserData user_data = {0};

  glfwSetErrorCallback(Error);
  glfwInit();

#if !defined(FULL_GL)
  glfwWindowHint(GLFW_CLIENT_API, GLFW_OPENGL_ES_API);
  glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
#endif
  GLFWwindow *window = glfwCreateWindow(512, 512, argv[0], NULL, NULL);

  glfwSetWindowUserPointer(window, &user_data);
  glfwSetWindowSizeCallback(window, Reshape);
  glfwSetKeyCallback(window, Key);
  glfwSetWindowPos(window, 200, 100);
  glfwMakeContextCurrent(window);

  // fake initial resize callback
  int width, height;
  glfwGetWindowSize(window, &width, &height);
  Reshape(window, width, height);

  while (!glfwWindowShouldClose(window)) {
    if (user_data.paused) {
      glfwWaitEvents();
    } else {
      glfwPollEvents();
      Draw(window);
    }
  }

  glfwDestroyWindow(window);
  glfwTerminate();
  return 0;
}

elmindreda wrote on Friday, June 21, 2013:

I don’t know what the cause could be. GLFW does not call XPutImage.

combolek wrote on Saturday, June 22, 2013:

Well, not directly… But it does call eglSwapBuffers when creating a window, which in turn calls XPutImage (actually xcb_put_image, since Mesa EGL uses libxcb instead of libX11). At least that’s what happens with software rendering, which is what you get with Mesa EGL on Nvidia and AMD. On Intel you get real hardware rendering, which is probably why it works there.
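
For what it’s worth, here is roughly how one can inspect which config the context actually got, through the native access header (a sketch, error checking omitted; the PrintAlphaSize helper is just for illustration):

#include <stdio.h>

/* Sketch: report the EGL_ALPHA_SIZE of the config behind the current
   context, looked up via its EGL_CONFIG_ID. Error checking omitted. */
#define GLFW_EXPOSE_NATIVE_X11 1
#define GLFW_EXPOSE_NATIVE_EGL 1
#include <GLFW/glfw3native.h>

static void PrintAlphaSize(GLFWwindow *window)
{
  EGLDisplay display = glfwGetEGLDisplay();
  EGLContext context = glfwGetEGLContext(window);
  EGLint config_id = 0, count = 0, alpha = 0;
  EGLConfig config;

  /* The context remembers which config it was created with. */
  eglQueryContext(display, context, EGL_CONFIG_ID, &config_id);

  /* Selecting by EGL_CONFIG_ID yields exactly that one config. */
  const EGLint attribs[] = { EGL_CONFIG_ID, config_id, EGL_NONE };
  eglChooseConfig(display, attribs, &config, 1, &count);

  eglGetConfigAttrib(display, config, EGL_ALPHA_SIZE, &alpha);
  printf("EGL_ALPHA_SIZE = %d\n", alpha);
}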

I have mostly figured out what happens, though I don’t really understand why. It comes down to the wrong EGL config being chosen. For my test program to work with software rendering, it needs a config with EGL_ALPHA_SIZE=0. I don’t know why; perhaps it’s a bug in Mesa? In any case, I need to do:

glfwWindowHint(GLFW_ALPHA_BITS, 0);

But this by itself is not enough. It seems that GLFW ignores channel size hints that are set to 0 when choosing the “best” config. I had to make the following change to have GLFW honour the no-alpha-channel hint:

diff --git a/src/context.c b/src/context.c
index deb7398..aaced2a 100644
--- a/src/context.c
+++ b/src/context.c
@@ -280,7 +280,6 @@ const _GLFWfbconfig* _glfwChooseFBConfig(const _GLFWfbconfig* desired,
         {
             extraDiff = 0;
 
-            if (desired->alphaBits > 0)
             {
                 extraDiff += (desired->alphaBits - current->alphaBits) *
                              (desired->alphaBits - current->alphaBits);

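To make the effect concrete, here is a toy version of the scoring idea (simplified, not the actual GLFW code): with the guard in place, a zero hint adds no penalty at all, so a config with 8 alpha bits scores exactly the same as one with none.

#include <stdio.h>

/* Toy model of the channel scoring in _glfwChooseFBConfig (simplified,
   not the real code): the penalty is the squared difference between the
   desired and actual channel size. "guarded" mimics the
   `if (desired->alphaBits > 0)` test that the patch above removes. */
static int Penalty(int desired, int current, int guarded)
{
  if (guarded && desired <= 0)
    return 0;  /* zero hint contributes nothing, so alpha becomes a wildcard */
  return (desired - current) * (desired - current);
}

int main(void)
{
  /* Alpha hint of 0 against candidate configs with 0 and 8 alpha bits: */
  printf("guarded:   %d vs %d\n", Penalty(0, 0, 1), Penalty(0, 8, 1)); /* 0 vs 0: tie */
  printf("unguarded: %d vs %d\n", Penalty(0, 0, 0), Penalty(0, 8, 0)); /* 0 vs 64: no-alpha wins */
  return 0;
}
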
So, I am not sure what to think about this. On one hand, it is probably a Mesa bug (or maybe my bug, but the test program is so simple…). On the other hand, it would be nice if GLFW honoured the hints as closely as possible.

May I suggest that GLFW consider any size hint that differs from the default, even if it is set to zero?
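
One way to do that would be a dedicated “don’t care” sentinel, so that zero becomes a real request rather than a wildcard. A rough sketch of the idea (the names here are made up, not GLFW API):

/* Sketch of the suggestion (hypothetical names, not GLFW API): reserve
   a negative sentinel for "no preference" so an explicit 0 is honoured
   like any other value. */
#define HINT_DONT_CARE (-1)

static int Penalty(int desired, int current)
{
  if (desired == HINT_DONT_CARE)
    return 0;  /* skip only when the hint was genuinely left unset */
  return (desired - current) * (desired - current);
}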

Thanks

Bolek