From fdcbbeb55ecafe119bb98dcedb8492416f5bc966 Mon Sep 17 00:00:00 2001
From: Brian
Date: Mon, 19 Mar 2007 14:44:15 -0600
Subject: [PATCH] Properly compute render_inputs_bitset when using a vertex
 program/shader. This fixes a performance regression introduced early in
 glsl-compiler-1 work.

---
 src/mesa/tnl/t_context.c | 20 +++++++++++++-------
 1 file changed, 13 insertions(+), 7 deletions(-)

diff --git a/src/mesa/tnl/t_context.c b/src/mesa/tnl/t_context.c
index 3b2f91a..f665485 100644
--- a/src/mesa/tnl/t_context.c
+++ b/src/mesa/tnl/t_context.c
@@ -150,13 +150,19 @@ _tnl_InvalidateState( GLcontext *ctx, GLuint new_state )
        (ctx->VertexProgram._Enabled && ctx->VertexProgram.PointSizeEnabled))
       RENDERINPUTS_SET( tnl->render_inputs_bitset, _TNL_ATTRIB_POINTSIZE );
 
-#if 1 /* XXX NEW_SLANG */
-   RENDERINPUTS_SET_RANGE( tnl->render_inputs_bitset,
-                           _TNL_FIRST_GENERIC, _TNL_LAST_GENERIC );
-#else
-   if (ctx->ShaderObjects._VertexShaderPresent || ctx->ShaderObjects._FragmentShaderPresent)
-      RENDERINPUTS_SET_RANGE( tnl->render_inputs_bitset, _TNL_FIRST_GENERIC, _TNL_LAST_GENERIC );
-#endif
+   /* check for varying vars which are written by the vertex program */
+   {
+      struct gl_vertex_program *vp = ctx->VertexProgram._Current;
+      if (vp) {
+         GLuint i;
+         for (i = 0; i < MAX_VARYING; i++) {
+            if (vp->Base.OutputsWritten & (1 << (VERT_RESULT_VAR0 + i))) {
+               RENDERINPUTS_SET(tnl->render_inputs_bitset,
+                                _TNL_ATTRIB_GENERIC(i));
+            }
+         }
+      }
+   }
 }
 
 
-- 
2.7.4