GPU/HW: Use uint in CPU-side vertex attributes

Connor McLaughlin 2020-04-23 15:55:49 +10:00
parent e2f5905cd6
commit 990147b7e9
2 changed files with 5 additions and 4 deletions

@@ -247,8 +247,8 @@ bool GPU_HW_D3D11::CreateBatchInputLayout()
   static constexpr std::array<D3D11_INPUT_ELEMENT_DESC, 4> attributes = {
     {{"ATTR", 0, DXGI_FORMAT_R32G32_SINT, 0, offsetof(BatchVertex, x), D3D11_INPUT_PER_VERTEX_DATA, 0},
      {"ATTR", 1, DXGI_FORMAT_R8G8B8A8_UNORM, 0, offsetof(BatchVertex, color), D3D11_INPUT_PER_VERTEX_DATA, 0},
-     {"ATTR", 2, DXGI_FORMAT_R32_SINT, 0, offsetof(BatchVertex, u), D3D11_INPUT_PER_VERTEX_DATA, 0},
-     {"ATTR", 3, DXGI_FORMAT_R32_SINT, 0, offsetof(BatchVertex, texpage), D3D11_INPUT_PER_VERTEX_DATA, 0}}};
+     {"ATTR", 2, DXGI_FORMAT_R32_UINT, 0, offsetof(BatchVertex, u), D3D11_INPUT_PER_VERTEX_DATA, 0},
+     {"ATTR", 3, DXGI_FORMAT_R32_UINT, 0, offsetof(BatchVertex, texpage), D3D11_INPUT_PER_VERTEX_DATA, 0}}};
   // we need a vertex shader...
   GPU_HW_ShaderGen shadergen(m_host_display->GetRenderAPI(), m_resolution_scale, m_true_color, m_scaled_dithering,
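For context, the descriptors above imply a vertex layout along these lines. This is a sketch inferred from the formats and offsets in the hunk, not the repository's exact definition; the real BatchVertex lives in the shared GPU_HW code and may carry more fields:

struct BatchVertex
{
  s32 x, y;    // consumed as DXGI_FORMAT_R32G32_SINT (GL_INT on the GL path)
  u32 color;   // packed RGBA8, normalized to float4 via R8G8B8A8_UNORM
  u32 u;       // packed texture coordinates; now declared R32_UINT to match the unsigned field
  u32 texpage; // packed texpage/palette bits; now declared R32_UINT
};

Declaring the last two attributes as UINT means the shader receives the packed bits unmodified instead of reinterpreting them as signed values.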

@@ -274,8 +274,9 @@ bool GPU_HW_OpenGL::CreateVertexBuffer()
   glVertexAttribIPointer(0, 2, GL_INT, sizeof(BatchVertex), reinterpret_cast<void*>(offsetof(BatchVertex, x)));
   glVertexAttribPointer(1, 4, GL_UNSIGNED_BYTE, true, sizeof(BatchVertex),
                         reinterpret_cast<void*>(offsetof(BatchVertex, color)));
-  glVertexAttribIPointer(2, 1, GL_INT, sizeof(BatchVertex), reinterpret_cast<void*>(offsetof(BatchVertex, u)));
-  glVertexAttribIPointer(3, 1, GL_INT, sizeof(BatchVertex), reinterpret_cast<void*>(offsetof(BatchVertex, texpage)));
+  glVertexAttribIPointer(2, 1, GL_UNSIGNED_INT, sizeof(BatchVertex), reinterpret_cast<void*>(offsetof(BatchVertex, u)));
+  glVertexAttribIPointer(3, 1, GL_UNSIGNED_INT, sizeof(BatchVertex),
+                         reinterpret_cast<void*>(offsetof(BatchVertex, texpage)));
   glBindVertexArray(0);
   glGenVertexArrays(1, &m_attributeless_vao_id);
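The GL path keeps glVertexAttribIPointer for these attributes: glVertexAttribPointer would hand the values to the shader as floats (converted or normalized), while the I variant delivers the raw integer bits to int/uint inputs. A hypothetical fragment of the generated vertex shader illustrates the matching declarations; the real source is emitted by GPU_HW_ShaderGen, so the input names below are illustrative only:

// Illustrative only: actual declarations come from GPU_HW_ShaderGen.
static constexpr const char* batch_vs_inputs = R"(
layout(location = 0) in ivec2 a_pos;      // fed by glVertexAttribIPointer(0, 2, GL_INT, ...)
layout(location = 1) in vec4 a_col0;      // fed by glVertexAttribPointer(..., GL_UNSIGNED_BYTE, true, ...), normalized to [0, 1]
layout(location = 2) in uint a_texcoord;  // fed by glVertexAttribIPointer(2, 1, GL_UNSIGNED_INT, ...)
layout(location = 3) in uint a_texpage;   // fed by glVertexAttribIPointer(3, 1, GL_UNSIGNED_INT, ...)
)";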