
Basically, I implemented gamma correction in a Q3-based game to get rid of the annoying SetDeviceGammaRamp. On Windows and OS X everything works fine, but on Linux it draws garbage. It looks heavily zoomed in: sometimes when I move the cursor I can actually see it, but blown up to the size of the whole scene. The gamma correction itself seems to work, because I can see it changing those few "zoomed in" pixels on the screen.

const char *g_GammaVertexShaderARB = {
    "void main(void)" "\n"
    "{" "\n"
        "gl_Position = ftransform();" "\n"
        "gl_TexCoord[0] = gl_MultiTexCoord0;"  "\n"
    "}"
};

const char *g_GammaFragmentShaderARB = {
    "uniform sampler2D sceneBuffer;" "\n"
    "uniform float gamma;" "\n"
    "\n"
    "void main(void)" "\n"
    "{" "\n"
        "vec2 uv = gl_TexCoord[0].xy;" "\n"
        "vec3 color = texture2D(sceneBuffer, uv).rgb;" "\n"
        "gl_FragColor.rgb = pow(color, vec3(1.0 / gamma));" "\n"
    "}"
};
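One thing worth noting: the fragment shader above never writes gl_FragColor.a, and unwritten fragment outputs are undefined in GLSL, so drivers are free to disagree about the result. A variant that writes the full vector (the _v2 name is just for illustration):

const char *g_GammaFragmentShaderARB_v2 = {
    "uniform sampler2D sceneBuffer;" "\n"
    "uniform float gamma;" "\n"
    "\n"
    "void main(void)" "\n"
    "{" "\n"
        "vec2 uv = gl_TexCoord[0].xy;" "\n"
        "vec3 color = texture2D(sceneBuffer, uv).rgb;" "\n"
        // write all four components so alpha is well-defined
        "gl_FragColor = vec4(pow(color, vec3(1.0 / gamma)), 1.0);" "\n"
    "}"
};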

At the beginning of a frame:

qglBindFramebufferEXT(GL_FRAMEBUFFER_EXT, tr.gammaFramebuffer);

At the end of a frame:

qglBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
qglBindRenderbufferEXT(GL_RENDERBUFFER_EXT, 0);

qglViewport(0, 0, glConfig.vidWidth, glConfig.vidHeight);
qglScissor(0, 0, glConfig.vidWidth, glConfig.vidHeight);
qglMatrixMode(GL_PROJECTION);
qglLoadIdentity();
qglOrtho(0, glConfig.vidWidth, glConfig.vidHeight, 0, 0, 1);
qglMatrixMode(GL_MODELVIEW);
qglLoadIdentity();

GL_State(GLS_DEPTHTEST_DISABLE);

qglUseProgramObjectARB(tr.gammaProgram);
qglEnable(GL_TEXTURE_2D);

qglUniform1iARB(tr.gammaSceneBufferLoc, 0);
qglUniform1fARB(tr.gammaUniformLoc, r_gamma->value);

qglColor3f(tr.identityLight, tr.identityLight, tr.identityLight);
qglActiveTextureARB(GL_TEXTURE0_ARB);
qglBindTexture(GL_TEXTURE_2D, tr.gammaRenderTarget);

// draw a fullscreen quad; V is flipped because the FBO texture's
// origin is bottom-left while the ortho projection puts y=0 at the top
qglBegin(GL_QUADS);
qglTexCoord2f(0, 1);
qglVertex2f(0, 0);
qglTexCoord2f(1, 1);
qglVertex2f(glConfig.vidWidth, 0);
qglTexCoord2f(1, 0);
qglVertex2f(glConfig.vidWidth, glConfig.vidHeight);
qglTexCoord2f(0, 0);
qglVertex2f(0, glConfig.vidHeight);
qglEnd();

qglUseProgramObjectARB(0);
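For debugging, a minimal error check after the gamma pass might help localize where the Linux driver objects (sketch only; ri.Printf is the usual Q3 logging wrapper and may differ in your tree):

// sketch: drain the GL error queue after the gamma pass so any
// driver-specific failure shows up in the console
GLenum err;
while ((err = qglGetError()) != GL_NO_ERROR) {
    ri.Printf(PRINT_WARNING, "gamma pass: GL error 0x%04x\n", err);
}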

Initialization (this part works and doesn't return an error):

GLint objStatus;

// shader
tr.m_hVShader = qglCreateShaderObjectARB(GL_VERTEX_SHADER_ARB);
qglShaderSourceARB(tr.m_hVShader, 1, (const GLcharARB **)&g_GammaVertexShaderARB, NULL);
qglCompileShaderARB(tr.m_hVShader);

tr.m_hFShader = qglCreateShaderObjectARB(GL_FRAGMENT_SHADER_ARB);
qglShaderSourceARB(tr.m_hFShader, 1, (const GLcharARB **)&g_GammaFragmentShaderARB, NULL);
qglCompileShaderARB(tr.m_hFShader);

// program
tr.gammaProgram = qglCreateProgramObjectARB();
qglAttachObjectARB(tr.gammaProgram, tr.m_hVShader);
qglAttachObjectARB(tr.gammaProgram, tr.m_hFShader);
qglLinkProgramARB(tr.gammaProgram);

qglUseProgramObjectARB(tr.gammaProgram);

tr.gammaUniformLoc = qglGetUniformLocationARB(tr.gammaProgram, "gamma");
tr.gammaSceneBufferLoc = qglGetUniformLocationARB(tr.gammaProgram, "sceneBuffer");

qglValidateProgramARB(tr.gammaProgram);
qglGetObjectParameterivARB(tr.gammaProgram, GL_OBJECT_VALIDATE_STATUS_ARB, &objStatus);
if (!objStatus) {
    return qtrue;
}

qglUseProgramObjectARB(0);

// framebuffer object
tr.gammaFramebuffer = 0;
qglGenFramebuffersEXT(1, &tr.gammaFramebuffer);
qglBindFramebufferEXT(GL_FRAMEBUFFER_EXT, tr.gammaFramebuffer);

// depth buffer
tr.gammaRenderDepthBuffer = 0;
qglGenRenderbuffersEXT(1, &tr.gammaRenderDepthBuffer);
qglBindRenderbufferEXT(GL_RENDERBUFFER_EXT, tr.gammaRenderDepthBuffer);
qglRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT, glConfig.vidWidth, glConfig.vidHeight);

// texture
tr.gammaRenderTarget = 0;
qglGenTextures(1, &tr.gammaRenderTarget);
qglBindTexture(GL_TEXTURE_2D, tr.gammaRenderTarget);
qglTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, glConfig.vidWidth, glConfig.vidHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, 0);
qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

qglFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, tr.gammaRenderTarget, 0);
qglFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, tr.gammaRenderDepthBuffer);
qglDrawBuffer(GL_COLOR_ATTACHMENT0_EXT);

// check completeness while the FBO is still bound; checking after
// unbinding would only test the default framebuffer
if (qglCheckFramebufferStatusEXT(GL_FRAMEBUFFER_EXT) != GL_FRAMEBUFFER_COMPLETE_EXT) {
    return qtrue;
}

qglBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);

return qfalse;
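Since the code above only checks the validate status, the compile and link logs are never printed; Mesa's stricter GLSL compiler often reports the actual problem there. A minimal sketch using the same ARB entry points (PrintInfoLogARB is a made-up helper name):

// hypothetical helper: dump the info log of a shader or program object,
// using only GL_ARB_shader_objects entry points
static void PrintInfoLogARB(GLhandleARB obj) {
    GLcharARB log[4096];
    GLsizei len = 0;
    qglGetInfoLogARB(obj, sizeof(log), &len, log);
    if (len > 0) {
        ri.Printf(PRINT_WARNING, "GLSL log:\n%s\n", log);
    }
}

// after qglCompileShaderARB / qglLinkProgramARB:
GLint ok = 0;
qglGetObjectParameterivARB(tr.m_hFShader, GL_OBJECT_COMPILE_STATUS_ARB, &ok);
if (!ok) PrintInfoLogARB(tr.m_hFShader);
qglGetObjectParameterivARB(tr.gammaProgram, GL_OBJECT_LINK_STATUS_ARB, &ok);
if (!ok) PrintInfoLogARB(tr.gammaProgram);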

My question is: what could cause the shaders to work on Windows and OS X but not on Linux? I tested it on Linux with my integrated Intel HD Graphics 4600 GPU.
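For reference, the Linux driver in use can be identified with standard GL queries (sketch):

// sketch: log which GL implementation is actually running, since the
// Mesa Intel driver on Linux differs from Intel's Windows driver
ri.Printf(PRINT_ALL, "GL_VENDOR:   %s\n", (const char *)qglGetString(GL_VENDOR));
ri.Printf(PRINT_ALL, "GL_RENDERER: %s\n", (const char *)qglGetString(GL_RENDERER));
ri.Printf(PRINT_ALL, "GL_VERSION:  %s\n", (const char *)qglGetString(GL_VERSION));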

It might be a driver inconsistency; try with a different GPU. Both Nvidia and AMD have quite strict GLSL compilers, and they might show you the error. –  akaltar 6 hours ago

I also tried it on several AMD/Nvidia cards. On Windows, no problem. Can't say much about OS X, but the one I tested worked. –  ouned 4 hours ago
