From 97c24c9ef01e263c6497ae92b2261f17e15122c8 Mon Sep 17 00:00:00 2001 From: rdb Date: Sun, 1 Mar 2015 17:32:20 +0100 Subject: [PATCH] Fix a Cg crash with GL_BGRA color arrays, on NVIDIA cards this time --- panda/src/glstuff/glCgShaderContext_src.cxx | 15 ++++----------- panda/src/glstuff/glCgShaderContext_src.h | 1 - 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/panda/src/glstuff/glCgShaderContext_src.cxx b/panda/src/glstuff/glCgShaderContext_src.cxx index 85b18b6f9d..0b05426529 100755 --- a/panda/src/glstuff/glCgShaderContext_src.cxx +++ b/panda/src/glstuff/glCgShaderContext_src.cxx @@ -41,7 +41,6 @@ CLP(CgShaderContext):: CLP(CgShaderContext)(CLP(GraphicsStateGuardian) *glgsg, Shader *s) : ShaderContext(s) { _glgsg = glgsg; _cg_program = 0; - _glsl_profile = false; nassertv(s->get_language() == Shader::SL_Cg); @@ -63,10 +62,6 @@ CLP(CgShaderContext)(CLP(GraphicsStateGuardian) *glgsg, Shader *s) : ShaderConte release_resources(); } else { - if (cgGetProgramProfile(_cg_program) == CG_PROFILE_GLSLC) { - _glsl_profile = true; - } - cgGLLoadProgram(_cg_program); CGerror error = cgGetError(); if (error != CG_NO_ERROR) { @@ -412,7 +407,7 @@ disable_shader_vertex_arrays() { CGparameter p = _cg_parameter_map[_shader->_var_spec[i]._id._seqno]; if (p == 0) continue; - if (_glsl_profile && cgGetParameterBaseResource(p) == CG_ATTR0) { + if (cgGetParameterBaseResource(p) == CG_ATTR0) { int index = cgGetParameterResourceIndex(p); if (index >= 8) { _glgsg->_glClientActiveTexture(GL_TEXTURE0 + (index - 8)); @@ -501,11 +496,9 @@ update_shader_vertex_arrays(ShaderContext *prev, bool force) { num_values = GL_BGRA; } - // This is truly the most preposterous hack. When using the GLSL - // profiles, cgGLSetParameterPointer relies on the the driver mapping - // standard attributes to fixed indices (and breaking the spec doing - // so), which only the NVIDIA drivers do. Unbelievable. 
- if (_glsl_profile && cgGetParameterBaseResource(p) == CG_ATTR0) { + // cgGLSetParameterPointer is just stupidly bugged on every level. + // Sigh. This seems to work on both NVIDIA and AMD cards now. + if (cgGetParameterBaseResource(p) == CG_ATTR0) { int index = cgGetParameterResourceIndex(p); switch (index) { case 0: // gl_Vertex diff --git a/panda/src/glstuff/glCgShaderContext_src.h b/panda/src/glstuff/glCgShaderContext_src.h index b5d45d4aaa..16dd9bcdb2 100755 --- a/panda/src/glstuff/glCgShaderContext_src.h +++ b/panda/src/glstuff/glCgShaderContext_src.h @@ -52,7 +52,6 @@ public: private: CGprogram _cg_program; - bool _glsl_profile; pvector<CGparameter> _cg_parameter_map;