I have reduced a previous rendering problem to a minimal case where I am stuck.
I have a vertex buffer consisting of 4 vertices arranged in a plane, labeled 0 to 3:
1. .2
0. .3
and a corresponding index buffer {0,1,2,3,0}.
Now, when I render with D3D11_PRIMITIVE_TOPOLOGY_LINESTRIP, I get the expected image:
__
| |
|__|
However, when I render with D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP, the result is:
| /|
|/ |
Note that no filling of triangles is performed.
Even more confusing, when using D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST, the result is:
|
|
If I change the index buffer to {0,1,2,0,2,3}, it renders:
| /
|/
That is, only a one-pixel line between the first two vertices of each triangle is being drawn.
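Just to document what I expect the input assembler to produce in each case, here is a small standalone sketch (not part of my renderer; the helper functions exist only for illustration):

#include <cstdio>
#include <vector>

// Enumerate the triangles a triangle strip should produce from an index list.
// (Winding alternates for odd triangles, but with D3D11_CULL_NONE that should
// not matter for whether anything gets filled.)
void PrintStripTriangles(const std::vector<int>& idx)
{
    for (size_t i = 2; i < idx.size(); ++i)
        std::printf("strip triangle: %d %d %d\n", idx[i - 2], idx[i - 1], idx[i]);
}

// Enumerate the triangles a triangle list should produce: one per 3 indices.
void PrintListTriangles(const std::vector<int>& idx)
{
    for (size_t i = 0; i + 2 < idx.size(); i += 3)
        std::printf("list triangle: %d %d %d\n", idx[i], idx[i + 1], idx[i + 2]);
}

int main()
{
    PrintStripTriangles({0, 1, 2, 3, 0});    // expect (0,1,2), (1,2,3), (2,3,0)
    PrintListTriangles({0, 1, 2, 0, 2, 3});  // expect (0,1,2), (0,2,3)
    return 0;
}

In both cases the whole quad should be covered by filled triangles, yet I only ever see those edges.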
I have reduced my shaders to the most primitive examples:
Vertex shader:
struct VertexInputType
{
    float4 position : POSITION;
};

struct PixelInputType
{
    float4 position : SV_POSITION;
};

PixelInputType VertexShader(VertexInputType input)
{
    PixelInputType output;
    // Force w to 1 since the input layout only supplies x, y, z.
    input.position.w = 1.0f;
    output.position = input.position;
    return output;
}
Pixel shader:
struct PixelInputType
{
    float4 position : SV_POSITION;
};

float4 PixelShader(PixelInputType input) : SV_TARGET
{
    float4 color;
    color.r = 0;
    color.g = 0;
    color.b = 0;
    color.a = 1;
    return color;
}
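The shaders are compiled more or less like this (a sketch; the file name, shader model targets, and compile flags are assumptions, only the entry-point names match the code above):

#include <d3dcompiler.h>
#pragma comment(lib, "d3dcompiler.lib")

// Sketch of the shader compilation; "shaders.hlsl", the targets and the flags
// are placeholders, the entry points are the functions shown above.
ID3DBlob* compiled_vshader_buffer = nullptr;
ID3DBlob* compiled_pshader_buffer = nullptr;
ID3DBlob* error_buffer = nullptr;

HRESULT hr = D3DCompileFromFile(L"shaders.hlsl", nullptr, nullptr,
    "VertexShader", "vs_5_0", D3DCOMPILE_ENABLE_STRICTNESS, 0,
    &compiled_vshader_buffer, &error_buffer);

hr = D3DCompileFromFile(L"shaders.hlsl", nullptr, nullptr,
    "PixelShader", "ps_5_0", D3DCOMPILE_ENABLE_STRICTNESS, 0,
    &compiled_pshader_buffer, &error_buffer);

d3d11_device->CreateVertexShader(compiled_vshader_buffer->GetBufferPointer(),
    compiled_vshader_buffer->GetBufferSize(), nullptr, &vertex_shader);
d3d11_device->CreatePixelShader(compiled_pshader_buffer->GetBufferPointer(),
    compiled_pshader_buffer->GetBufferSize(), nullptr, &pixel_shader);

Both compile calls succeed and return non-empty blobs.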
As vertices I'm using DirectX::XMFLOAT3:
D3D11_INPUT_ELEMENT_DESC polygon_layout[1];
polygon_layout[0].SemanticName = "POSITION";
polygon_layout[0].SemanticIndex = 0;
polygon_layout[0].Format = DXGI_FORMAT_R32G32B32_FLOAT;
polygon_layout[0].InputSlot = 0;
polygon_layout[0].AlignedByteOffset = 0;
polygon_layout[0].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
polygon_layout[0].InstanceDataStepRate = 0;
d3d11_device->CreateInputLayout(polygon_layout, 1, compiled_vshader_buffer->GetBufferPointer(), compiled_vshader_buffer->GetBufferSize(), &input_layout);
D3D11_BUFFER_DESC vertex_buffer_desc;
vertex_buffer_desc.Usage = D3D11_USAGE_DEFAULT;
vertex_buffer_desc.ByteWidth = sizeof(DirectX::XMFLOAT3) * 4;
vertex_buffer_desc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
vertex_buffer_desc.CPUAccessFlags = 0;
vertex_buffer_desc.MiscFlags = 0;
vertex_buffer_desc.StructureByteStride = 0;
DirectX::XMFLOAT3 vertices[4];
vertices[0].x = -0.5; vertices[0].y = -0.5; vertices[0].z = 0;
vertices[1].x = -0.5; vertices[1].y = 0.5; vertices[1].z = 0;
vertices[2].x = 0.5; vertices[2].y = 0.5; vertices[2].z = 0;
vertices[3].x = 0.5; vertices[3].y = -0.5; vertices[3].z = 0;
D3D11_SUBRESOURCE_DATA vertex_buffer_data;
vertex_buffer_data.pSysMem = vertices;
vertex_buffer_data.SysMemPitch = 0;
vertex_buffer_data.SysMemSlicePitch = 0;
hr = d3d11_device->CreateBuffer(&vertex_buffer_desc, &vertex_buffer_data, &vertex_buffer);
D3D11_BUFFER_DESC index_buffer_desc;
index_buffer_desc.Usage = D3D11_USAGE_DEFAULT;
index_buffer_desc.ByteWidth = sizeof(int32_t) * 6;
index_buffer_desc.BindFlags = D3D11_BIND_INDEX_BUFFER;
index_buffer_desc.CPUAccessFlags = 0;
index_buffer_desc.MiscFlags = 0;
index_buffer_desc.StructureByteStride = 0;
int32_t indices[6];
indices[0] = 0;
indices[1] = 1;
indices[2] = 2;
indices[3] = 2;
indices[4] = 3;
indices[5] = 0;
D3D11_SUBRESOURCE_DATA index_buffer_data;
index_buffer_data.pSysMem = indices;
index_buffer_data.SysMemPitch = 0;
index_buffer_data.SysMemSlicePitch = 0;
hr = d3d11_device->CreateBuffer(&index_buffer_desc, &index_buffer_data, &index_buffer);
// during rendering I set:
unsigned int stride = sizeof(DirectX::XMFLOAT3);
unsigned int offset = 0;
d3d11_context->IASetVertexBuffers(0, 1, &vertex_buffer, &stride, &offset);
d3d11_context->IASetIndexBuffer(index_buffer, DXGI_FORMAT_R32_UINT, 0);
d3d11_context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
d3d11_context->RSSetState(rasterizer_state);
d3d11_context->IASetInputLayout(input_layout);
d3d11_context->VSSetShader(vertex_shader, NULL, 0);
d3d11_context->PSSetShader(pixel_shader, NULL, 0);
// and render with:
d3d11_context->DrawIndexed(6, 0, 0);
When I inspect the shaders with ID3D11ShaderReflection::GetGSInputPrimitive(), I receive D3D_PRIMITIVE_UNDEFINED for both the vertex shader and the pixel shader.
I am setting up the rasterizer state with D3D11_FILL_SOLID and D3D11_CULL_NONE.
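For completeness, the rasterizer state is created roughly like this (a sketch; everything besides FillMode and CullMode is an assumption on my part):

// Sketch of the rasterizer state setup. FillMode and CullMode are what I
// actually use; the remaining fields are assumed defaults.
D3D11_RASTERIZER_DESC rasterizer_desc = {};
rasterizer_desc.FillMode = D3D11_FILL_SOLID;
rasterizer_desc.CullMode = D3D11_CULL_NONE;
rasterizer_desc.FrontCounterClockwise = FALSE;
rasterizer_desc.DepthBias = 0;
rasterizer_desc.DepthBiasClamp = 0.0f;
rasterizer_desc.SlopeScaledDepthBias = 0.0f;
rasterizer_desc.DepthClipEnable = TRUE;
rasterizer_desc.ScissorEnable = FALSE;
rasterizer_desc.MultisampleEnable = FALSE;
rasterizer_desc.AntialiasedLineEnable = FALSE;

hr = d3d11_device->CreateRasterizerState(&rasterizer_desc, &rasterizer_state);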
Is there any setting or state in the D3D11 context that could explain such behavior? I'd be grateful for any ideas on where to look. Thanks in advance!