1) Draw the sky brushes with no color (color mask set to 0) but writing depth, with the stencil op set to REPLACE so that every pixel drawn gets stencil value 1 (KEEP elsewhere).
2) Then draw the normal sky or the sky box where stencil = 1, honoring the depth buffer.
Works fine on ATI and Intel. Fails on NVidia.
I expect to have to solve this myself, and I should be able to get my hands on a machine with an NVidia card for direct testing.
But I wanted to ask around to see if I am missing something obvious here.
Code: Select all
/*
 Sky_Stencil_Draw -- draws the sky through a stencil mask.

 Pass 1: renders the sky brush polygons with all color channels
 disabled, tagging every covered pixel with stencil value 1
 (depth writes stay enabled during this pass -- the eglDepthMask
 call that would disable them is intentionally commented out).
 Pass 2: draws the actual sky (Sky_DrawSky) restricted to pixels
 whose stencil value equals 1, then disables the stencil test and
 clears the stencil buffer for the next frame.
*/
void Sky_Stencil_Draw (void)
{
	msurface_t	*surf;
	texture_t	*tex;
	int			texnum;

	// Requires real stencil bits; the Direct3D wrapper has no stencil support
	if (!renderer.gl_stencilbits || vid.direct3d)
		return;

	// Nothing to do if this level has no sky
	if (!level.sky /*|| !frame.has_sky*/)
		return;

	// Wherever a fragment is drawn (i.e. doesn't z-fail), write 1
	// into the stencil buffer; leave it untouched everywhere else.
	eglStencilFunc (GL_ALWAYS, 1, ~0 );
	eglStencilOp (GL_KEEP, GL_KEEP, GL_REPLACE);
	eglEnable (GL_STENCIL_TEST);

	// Invisible pass over the sky brushes that lays down the stencil
	// mask. Note: depth IS written here because the mask call below
	// is commented out.
	eglColorMask (0,0,0,0);
	// eglDepthMask (0); // Don't write depth to buffer
	eglDisable (GL_TEXTURE_2D);

	for (texnum = 0; texnum < cl.worldmodel->numtextures; texnum++)
	{
		tex = cl.worldmodel->textures[texnum];

		// Skip empty chains and anything that isn't a sky texture
		if (!tex || !tex->texturechain || !(tex->texturechain->flags & SURF_DRAWSKY))
			continue;

		for (surf = tex->texturechain; surf; surf = surf->texturechain)
		// if (!surf->culled)
		{
			DrawGLPoly (surf->polys, 0); // Not here.
			rs_brushpasses++;
		}
	}

	eglEnable (GL_TEXTURE_2D);
	eglColorMask (1,1,1,1);
	// eglDepthMask (1);

	// Second pass: only touch pixels where the stencil mask equals 1,
	// and never modify the stencil buffer while doing so.
	eglStencilOp( GL_KEEP, GL_KEEP, GL_KEEP );
	eglStencilFunc( GL_EQUAL, 1, ~0 );

	// Draw the real sky (or skybox) through the mask
	Sky_DrawSky ();

	// Restore state and wipe the mask for the next frame
	eglDisable (GL_STENCIL_TEST);
	eglClear (GL_STENCIL_BUFFER_BIT);
}