D3D: Using start index and base vertex instead of buffer offsets

Yuriy O'Donnell 2014-10-25 19:10:41 +02:00
parent 48ba55203b
commit c35847b795
1 changed file with 9 additions and 5 deletions

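The change binds the vertex and index buffers with zero byte offsets and instead tells DrawIndexed where the data lives, via its StartIndexLocation and BaseVertexLocation arguments. A minimal standalone sketch of that pattern (not Dolphin code; ctx, buffer, and the offset parameters are placeholder names) looks like this:

#include <d3d11.h>

// Draw a range that lives at byte offsets vertexDrawOffset / indexDrawOffset
// inside one shared buffer, without encoding those offsets into the bindings.
// Sketch only; assumes 16-bit indices, as in the diff below.
static void DrawRange(ID3D11DeviceContext* ctx, ID3D11Buffer* buffer, UINT stride,
                      UINT vertexDrawOffset, UINT indexDrawOffset, UINT indexCount)
{
	// Bind the buffer once, with zero byte offsets.
	UINT zero = 0;
	ctx->IASetVertexBuffers(0, 1, &buffer, &stride, &zero);
	ctx->IASetIndexBuffer(buffer, DXGI_FORMAT_R16_UINT, 0);

	// Convert the byte offsets to whole-element offsets and let DrawIndexed
	// locate the range.
	UINT startIndex = indexDrawOffset / sizeof(unsigned short); // 16-bit indices
	INT baseVertex = static_cast<INT>(vertexDrawOffset / stride);
	ctx->DrawIndexed(indexCount, startIndex, baseVertex);
}

With this, later draws from the same large buffer only need a new startIndex/baseVertex pair rather than a re-bind with different byte offsets.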

@@ -127,13 +127,17 @@ void VertexManager::Draw(u32 stride)
 	u32 components = VertexLoaderManager::GetCurrentVertexFormat()->m_components;
 	u32 indices = IndexGenerator::GetIndexLen();
-	D3D::context->IASetVertexBuffers(0, 1, &m_buffers[m_currentBuffer], &stride, &m_vertexDrawOffset);
-	D3D::context->IASetIndexBuffer(m_buffers[m_currentBuffer], DXGI_FORMAT_R16_UINT, m_indexDrawOffset);
+	u32 zero = 0;
+	D3D::context->IASetVertexBuffers(0, 1, &m_buffers[m_currentBuffer], &stride, &zero);
+	D3D::context->IASetIndexBuffer(m_buffers[m_currentBuffer], DXGI_FORMAT_R16_UINT, 0);
+	u32 baseVertex = m_vertexDrawOffset / stride;
+	u32 startIndex = m_indexDrawOffset / sizeof(u16);
 	if (current_primitive_type == PRIMITIVE_TRIANGLES)
 	{
 		D3D::context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP);
-		D3D::context->DrawIndexed(indices, 0, 0);
+		D3D::context->DrawIndexed(indices, startIndex, baseVertex);
 		INCSTAT(stats.thisFrame.numDrawCalls);
 	}
 	else if (current_primitive_type == PRIMITIVE_LINES)
@@ -153,7 +157,7 @@ void VertexManager::Draw(u32 stride)
 	{
 		((DX11::Renderer*)g_renderer)->ApplyCullDisable(); // Disable culling for lines and points
 		D3D::context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_LINELIST);
-		D3D::context->DrawIndexed(indices, 0, 0);
+		D3D::context->DrawIndexed(indices, startIndex, baseVertex);
 		INCSTAT(stats.thisFrame.numDrawCalls);
 		D3D::context->GSSetShader(nullptr, nullptr, 0);
@@ -177,7 +181,7 @@ void VertexManager::Draw(u32 stride)
 	{
 		((DX11::Renderer*)g_renderer)->ApplyCullDisable(); // Disable culling for lines and points
 		D3D::context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_POINTLIST);
-		D3D::context->DrawIndexed(indices, 0, 0);
+		D3D::context->DrawIndexed(indices, startIndex, baseVertex);
 		INCSTAT(stats.thisFrame.numDrawCalls);
 		D3D::context->GSSetShader(nullptr, nullptr, 0);