Question

Why do my squares do this (index buffer issue)?

[screenshot of the incorrectly rendered squares]

I have set the following vertices and indices:

struct VERTEX
{
    float X, Y, Z;
    float R, G, B;
};

const unsigned short SquareVertices::indices[ 6 ] = {
    0, 1, 2,
    0, 2, 3
};

const VERTEX SquareVertices::vertices[ 4 ] = {
    -1.0f, -1.0f, 1.0f,     1.0f, 0.0f, 0.0f,
    -1.0f, 1.0f, 1.0f,      0.0f, 1.0f, 0.0f,
    1.0f, 1.0f, 1.0f,       0.0f, 0.0f, 1.0f,
    1.0f, -1.0f, 1.0f,      0.0f, 1.0f, 0.0f
};

I have done initialisation like this:

void Player::Initialize( )
{
    // Create vertex buffer
    D3D11_BUFFER_DESC bd = { 0 };
    bd.ByteWidth = sizeof( VERTEX )* ARRAYSIZE( Player::vertices );
    bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;

    D3D11_SUBRESOURCE_DATA srd = { Player::vertices, 0, 0 };

    d3dDevice->CreateBuffer( &bd, &srd, &vertexbuffer );

    // Create the index buffer
    D3D11_BUFFER_DESC ibd = { 0 };
    ibd.ByteWidth = sizeof( short )* ARRAYSIZE( Player::indices );
    ibd.BindFlags = D3D11_BIND_INDEX_BUFFER;

    D3D11_SUBRESOURCE_DATA isrd = { Player::indices, 0, 0 };

    d3dDevice->CreateBuffer( &ibd, &isrd, &indexbuffer );
}

The setup for the constant buffer that is passed to the shader looks like this:

void Game::SetUpConstantBuffer()
{
    D3D11_BUFFER_DESC bd = { 0 };

    bd.Usage = D3D11_USAGE_DEFAULT;
    bd.ByteWidth = 64; // one 4x4 matrix of floats
    bd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;

    d3dDevice->CreateBuffer( &bd, nullptr, &constantbuffer ); 
    d3dDeviceContext->VSSetConstantBuffers( 0, 1, constantbuffer.GetAddressOf( ) );
}

My input layout looks like this:

// initialize input layout
D3D11_INPUT_ELEMENT_DESC ied[ ] =
{
    { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "COLOR", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 }
};

My drawing section looks like this:

// set the vertex buffer
UINT stride = sizeof( VERTEX );
UINT offset = 0;
d3dDeviceContext->IASetVertexBuffers( 0, 1, player->vertexbuffer.GetAddressOf( ), &stride, &offset );
d3dDeviceContext->IASetPrimitiveTopology( D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP );
d3dDeviceContext->IASetIndexBuffer( player->indexbuffer.Get( ), DXGI_FORMAT_R32_UINT, 0 );
d3dDeviceContext->UpdateSubresource( constantbuffer.Get( ), 0, 0, &matFinal[ 0 ], 0, 0 ); // set the new values for the constant buffer
d3dDeviceContext->DrawIndexed( ARRAYSIZE( player->indices ), 0, 0 ); // draw all 6 indices, starting from index 0

I have set up my perspective and camera like this:

void Game::SetUpViewTransformations( )
{
    XMVECTOR vecCamPosition = XMVectorSet( 0.0f, 0.0f, -10.0f, 0 );
    XMVECTOR vecCamLookAt = XMVectorSet( 0, 0, 0, 0 );
    XMVECTOR vecCamUp = XMVectorSet( 0, 1, 0, 0 );

    matView = XMMatrixLookAtLH( vecCamPosition, vecCamLookAt, vecCamUp );
}

void Game::SetUpMatProjection( )
{
    CoreWindow^ window = CoreWindow::GetForCurrentThread( );    // get the window pointer

    matProjection = XMMatrixPerspectiveFovLH(
        XMConvertToRadians( 45 ),                                      // the field of view
        ( FLOAT )window->Bounds.Width / ( FLOAT )window->Bounds.Height,  // aspect ratio
        1,                                                           // the near view-plane
        100 );                                                        // the far view-plane
}

Extra info

If I change the second parameter of IASetIndexBuffer to DXGI_FORMAT_R16_UINT, it draws this:

[screenshot of the result with DXGI_FORMAT_R16_UINT]


The solution

Your second parameter to IASetIndexBuffer should be DXGI_FORMAT_R16_UINT: your indices are unsigned short, which is 16-bit, not 32-bit, so reading them as 32-bit values merges pairs of indices into garbage. I'm no DirectX expert, but I think you also need to pass D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST to IASetPrimitiveTopology, since your index buffer describes two separate triangles rather than a strip.
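
For reference, a minimal sketch of the drawing section with both changes applied, reusing the player, d3dDeviceContext, constantbuffer and matFinal names from the question (not a complete, verified program):

// set the vertex buffer
UINT stride = sizeof( VERTEX );
UINT offset = 0;
d3dDeviceContext->IASetVertexBuffers( 0, 1, player->vertexbuffer.GetAddressOf( ), &stride, &offset );

// the indices are unsigned short (16-bit), so the index buffer format must be R16_UINT
d3dDeviceContext->IASetIndexBuffer( player->indexbuffer.Get( ), DXGI_FORMAT_R16_UINT, 0 );

// two independent triangles (0,1,2 and 0,2,3), so use a triangle list, not a strip
d3dDeviceContext->IASetPrimitiveTopology( D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST );

d3dDeviceContext->UpdateSubresource( constantbuffer.Get( ), 0, 0, &matFinal[ 0 ], 0, 0 );
d3dDeviceContext->DrawIndexed( ARRAYSIZE( player->indices ), 0, 0 ); // 6 indices, starting from index 0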
