UV Atlas Generation and Seam Removal
Posted by P. Avery on Game Development, 2013-08-14
I'm generating light maps for scene mesh objects using DirectX's UV atlas tool (D3DXUVAtlasCreate()). I've succeeded in generating an atlas; however, when I render the mesh object using the atlas, the seams are visible on the mesh. Below are images of a lightmap generated for a cube. Here is the code I use to generate a UV atlas for a cube:
struct sVertexPosNormTex
{
    D3DXVECTOR3 vPos, vNorm;
    D3DXVECTOR2 vUV;

    sVertexPosNormTex(){}
    sVertexPosNormTex( D3DXVECTOR3 v, D3DXVECTOR3 n, D3DXVECTOR2 uv )
    {
        vPos = v;
        vNorm = n;
        vUV = uv;
    }
    ~sVertexPosNormTex()
    {
    }
};
// create a light map texture to fill programmatically
hr = D3DXCreateTexture( pd3dDevice, 128, 128, 1, 0, D3DFMT_A8R8G8B8,
                        D3DPOOL_MANAGED, &pLightmap );
if( FAILED( hr ) )
{
    DebugStringDX( "Main", "Failed to D3DXCreateTexture( lightmap )", __LINE__, hr );
    return hr;
}

// get the zero-level surface from the texture
IDirect3DSurface9 *pS = NULL;
pLightmap->GetSurfaceLevel( 0, &pS );

// clear the surface
pd3dDevice->ColorFill( pS, NULL, D3DCOLOR_XRGB( 0, 0, 0 ) );

// load a sample mesh
DWORD dwcMaterials = 0;
LPD3DXBUFFER pMaterialBuffer = NULL;
V_RETURN( D3DXLoadMeshFromX( L"cube3.x", D3DXMESH_MANAGED, pd3dDevice, &pAdjacency, &pMaterialBuffer, NULL, &dwcMaterials, &g_pMesh ) );

// generate adjacency
DWORD *pdwAdjacency = new DWORD[ 3 * g_pMesh->GetNumFaces() ];
g_pMesh->GenerateAdjacency( 1e-6f, pdwAdjacency );
// create light map coordinates
LPD3DXMESH pMesh = NULL;
LPD3DXBUFFER pFacePartitioning = NULL, pVertexRemapArray = NULL;
FLOAT resultStretch = 0;
UINT numCharts = 0;
hr = D3DXUVAtlasCreate( g_pMesh, 0, 0, 128, 128, 3.5f, 0, pdwAdjacency, NULL, NULL, NULL, NULL, NULL, 0, &pMesh,
                        &pFacePartitioning, &pVertexRemapArray, &resultStretch, &numCharts );
if( SUCCEEDED( hr ) )
{
    // release and set mesh
    SAFE_RELEASE( g_pMesh );
    g_pMesh = pMesh;

    // write mesh to file
    hr = D3DXSaveMeshToX( L"cube4.x",
                          g_pMesh,
                          0,
                          ( const D3DXMATERIAL* )pMaterialBuffer->GetBufferPointer(),
                          NULL,
                          dwcMaterials,
                          D3DXF_FILEFORMAT_TEXT );
    if( FAILED( hr ) )
    {
        DebugStringDX( "Main", "Failed to D3DXSaveMeshToX() at OnD3D9CreateDevice()", __LINE__, hr );
    }

    // fill the light map
    hr = BuildLightmap( pS, g_pMesh );
    if( FAILED( hr ) )
    {
        DebugStringDX( "Main", "Failed to BuildLightmap()", __LINE__, hr );
    }
}
else
{
    DebugStringDX( "Main", "Failed to D3DXUVAtlasCreate() at OnD3D9CreateDevice()", __LINE__, hr );
}

SAFE_RELEASE( pS );
SAFE_DELETE_ARRAY( pdwAdjacency );
SAFE_RELEASE( pFacePartitioning );
SAFE_RELEASE( pVertexRemapArray );
SAFE_RELEASE( pMaterialBuffer );
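One thing worth making explicit before BuildLightmap() runs: it casts the locked vertex buffer to sVertexPosNormTex* and the index buffer to WORD*, so the mesh has to carry exactly that vertex layout and 16-bit indices. A minimal guard sketch (the FVF value is my assumption about cube3.x, not something I've confirmed):

// make sure the mesh layout matches the casts in BuildLightmap();
// the FVF here (pos + normal + one 2D uv set) is an assumption
const DWORD dwLightmapFVF = D3DFVF_XYZ | D3DFVF_NORMAL | D3DFVF_TEX1;
if( g_pMesh->GetFVF() != dwLightmapFVF )
{
    LPD3DXMESH pClone = NULL;
    hr = g_pMesh->CloneMeshFVF( D3DXMESH_MANAGED, dwLightmapFVF, pd3dDevice, &pClone );
    if( SUCCEEDED( hr ) )
    {
        SAFE_RELEASE( g_pMesh );
        g_pMesh = pClone;
    }
}

// the WORD* cast in BuildLightmap() is only valid for 16-bit index buffers
if( g_pMesh->GetOptions() & D3DXMESH_32BIT )
    DebugStringDX( "Main", "Mesh uses 32-bit indices; BuildLightmap() expects 16-bit", __LINE__, E_FAIL );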
Here is the code that fills the lightmap texture:
HRESULT BuildLightmap( IDirect3DSurface9 *pS, LPD3DXMESH pMesh )
{
    HRESULT hr = S_OK;

    // validate lightmap texture surface and mesh
    if( !pS || !pMesh )
        return E_POINTER;

    // lock the mesh vertex buffer
    // NOTE: assumes the mesh vertex layout matches sVertexPosNormTex
    sVertexPosNormTex *pV = NULL;
    pMesh->LockVertexBuffer( D3DLOCK_READONLY, ( void** )&pV );

    // lock the mesh index buffer
    // NOTE: assumes 16-bit indices (mesh created without D3DXMESH_32BIT)
    WORD *pI = NULL;
    pMesh->LockIndexBuffer( D3DLOCK_READONLY, ( void** )&pI );

    // get the lightmap texture surface description
    D3DSURFACE_DESC desc;
    pS->GetDesc( &desc );

    // lock the surface rect to fill with color data
    D3DLOCKED_RECT rct;
    hr = pS->LockRect( &rct, NULL, 0 );
    if( FAILED( hr ) )
    {
        DebugStringDX( "main.cpp:", "Failed to IDirect3DSurface9::LockRect()", __LINE__, hr );
        // unlock the mesh buffers before bailing out
        pMesh->UnlockIndexBuffer();
        pMesh->UnlockVertexBuffer();
        return hr;
    }
    // iterate the pixels of the lightmap texture
    // check each pixel to see if it lies between the uv coordinates of a cube face
    BYTE *pBuffer = ( BYTE* )rct.pBits;
    for( UINT y = 0; y < desc.Height; ++y )
    {
        BYTE *pBufferRow = pBuffer;
        for( UINT x = 0; x < desc.Width * 4; x += 4 )
        {
            // determine the uv coordinate of the pixel's center
            // (derive the half-texel offset from the surface size
            // rather than hard-coding 128)
            D3DXVECTOR2 p( ( ( float )x / 4.0f + 0.5f ) / ( float )desc.Width,
                           ( ( float )y + 0.5f ) / ( float )desc.Height );

            // for each face of the mesh,
            // check to see if the pixel lies within the face's uv coordinates
            for( UINT i = 0; i < 3 * pMesh->GetNumFaces(); i += 3 )
            {
                sVertexPosNormTex v[ 3 ];
                v[ 0 ] = pV[ pI[ i + 0 ] ];
                v[ 1 ] = pV[ pI[ i + 1 ] ];
                v[ 2 ] = pV[ pI[ i + 2 ] ];
                if( TexcoordIsWithinBounds( v[ 0 ].vUV, v[ 1 ].vUV, v[ 2 ].vUV, p ) )
                {
                    // the pixel lies b/t the uv coordinates of a cube face
                    // light contribution functions aren't needed yet
                    //D3DXVECTOR3 vPos = TexcoordToPos( v[ 0 ].vPos, v[ 1 ].vPos, v[ 2 ].vPos, v[ 0 ].vUV, v[ 1 ].vUV, v[ 2 ].vUV, p );
                    //D3DXVECTOR3 vNormal = v[ 0 ].vNorm;

                    // set the color of this pixel to red (for the demo);
                    // byte order is B, G, R, A for D3DFMT_A8R8G8B8
                    BYTE ba[] = { 0, 0, 255, 255 };
                    //ComputeContribution( vPos, vNormal, g_sLight, ba );

                    // copy the byte array into the light map texture
                    memcpy( ( void* )&pBufferRow[ x ], ( void* )ba, 4 * sizeof( BYTE ) );
                }
            }
        }
        // go to the next row of the texture
        pBuffer += rct.Pitch;
    }
    // unlock the surface rect
    pS->UnlockRect();

    // unlock mesh vertex and index buffers
    pMesh->UnlockIndexBuffer();
    pMesh->UnlockVertexBuffer();

    // write the surface to file
    // (JPG is lossy and will smear chart edges; D3DXIFF_PNG is lossless)
    hr = D3DXSaveSurfaceToFile( L"LightMap.jpg", D3DXIFF_JPG, pS, NULL, NULL );
    if( FAILED( hr ) )
        DebugStringDX( "Main.cpp", "Failed to D3DXSaveSurfaceToFile()", __LINE__, hr );

    return hr;
}
bool TexcoordIsWithinBounds( const D3DXVECTOR2 &t0, const D3DXVECTOR2 &t1, const D3DXVECTOR2 &t2,
                             const D3DXVECTOR2 &p )
{
    // compute vectors
    D3DXVECTOR2 v0 = t1 - t0,
                v1 = t2 - t0,
                v2 = p - t0;

    float f00 = D3DXVec2Dot( &v0, &v0 );
    float f01 = D3DXVec2Dot( &v0, &v1 );
    float f02 = D3DXVec2Dot( &v0, &v2 );
    float f11 = D3DXVec2Dot( &v1, &v1 );
    float f12 = D3DXVec2Dot( &v1, &v2 );

    // compute barycentric coordinates
    float invDenom = 1.0f / ( f00 * f11 - f01 * f01 );
    float fU = ( f11 * f02 - f01 * f12 ) * invDenom;
    float fV = ( f00 * f12 - f01 * f02 ) * invDenom;

    // check if the point is in the triangle; the comparison against 1 is
    // inclusive so pixels exactly on a shared edge are not dropped
    return ( fU >= 0.0f ) && ( fV >= 0.0f ) && ( fU + fV <= 1.0f );
}
I believe the problem comes from the difference between the lightmap UV coordinates and the pixel-center coordinates. For example, here are the lightmap UV coordinates (generated by D3DXUVAtlasCreate()) for a specific face (tri) within the mesh; keep in mind that I'm using the mesh UV coordinates to write the pixels of the texture:
v[ 0 ].uv = D3DXVECTOR2( 0.003581, 0.295631 );
v[ 1 ].uv = D3DXVECTOR2( 0.003581, 0.003581 );
v[ 2 ].uv = D3DXVECTOR2( 0.295631, 0.003581 );
The lightmap texture size is 128 x 128 pixels, so the upper-left pixel's center coordinates are:

float halfPixel = 0.5f / 128.0f; // = 0.00390625
D3DXVECTOR2 pixelCenter = D3DXVECTOR2( halfPixel, halfPixel );
Will the mapping and sampling of the lightmap texture require that an offset be taken into account, or that the UV coordinates be snapped to the pixel centers?
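For what it's worth, my understanding of the D3D9 convention (the helper names are mine): a texel's center sits at (x + 0.5) / width, and the inverse mapping is a floor, not a round. So for the face above, u = 0.003581 on a 128-wide map lands in texel floor(0.003581 * 128) = floor(0.458) = 0, whose center is 0.00390625; close to the chart corner, but not on it:

#include <math.h> // floorf

// uv coordinate of the center of texel ( x, y ) on a W x H map
inline D3DXVECTOR2 TexelCenterToUV( UINT x, UINT y, UINT W, UINT H )
{
    return D3DXVECTOR2( ( ( float )x + 0.5f ) / ( float )W,
                        ( ( float )y + 0.5f ) / ( float )H );
}

// texel that contains a given uv coordinate (floor, not round)
inline void UVToTexel( const D3DXVECTOR2 &uv, UINT W, UINT H, UINT &x, UINT &y )
{
    x = ( UINT )floorf( uv.x * ( float )W );
    y = ( UINT )floorf( uv.y * ( float )H );
}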
Any ideas on the best way to approach this situation would be appreciated. What are the common practices?
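From what I've read, the common practice is to leave a gutter between charts (which D3DXUVAtlasCreate() already does via the fGutter parameter, 3.5 texels above) and then dilate, i.e., bleed each chart's border colors into the empty gutter texels after rasterizing, so bilinear samples near a seam never pick up the cleared background. A rough single-ring sketch (the coverage buffer is an addition of mine; BuildLightmap() would need to set it for every texel it writes, and the pass can be repeated with updated coverage for a wider bleed):

#include <vector>
#include <string.h> // memcpy

// one dilation pass over a locked A8R8G8B8 surface; 'coverage' holds one
// byte per texel, nonzero where a chart wrote a color
void DilateLightmap( const D3DLOCKED_RECT &rct, UINT W, UINT H,
                     const std::vector<BYTE> &coverage )
{
    BYTE *pBits = ( BYTE* )rct.pBits;
    for( UINT y = 0; y < H; ++y )
    {
        for( UINT x = 0; x < W; ++x )
        {
            if( coverage[ y * W + x ] )
                continue; // this texel was written by a chart

            // borrow the color of the first covered 4-neighbor
            const int nx[ 4 ] = { ( int )x - 1, ( int )x + 1, ( int )x,     ( int )x     };
            const int ny[ 4 ] = { ( int )y,     ( int )y,     ( int )y - 1, ( int )y + 1 };
            for( int n = 0; n < 4; ++n )
            {
                if( nx[ n ] < 0 || nx[ n ] >= ( int )W || ny[ n ] < 0 || ny[ n ] >= ( int )H )
                    continue;
                if( !coverage[ ny[ n ] * W + nx[ n ] ] )
                    continue;
                memcpy( &pBits[ y * rct.Pitch + x * 4 ],
                        &pBits[ ny[ n ] * rct.Pitch + nx[ n ] * 4 ],
                        4 );
                break;
            }
        }
    }
}

Since only uncovered texels are written and only covered texels are read, a single pass expands each chart by exactly one texel ring.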