Normals are not transferred to the DirectX 11 shader correctly - random, time-dependent values?


Today I was trying to add normal maps to my DirectX 11 application.

Something went wrong, so I decided to output the normals' values instead of color on the scene objects to "see" where the problem lies. What surprised me is that the normals' values change very fast (the colors blink every frame). And I'm sure that I don't manipulate their values during program execution (the positions of the vertices stay stable, but the normals do not).

Here are two screenshots taken at frames t1 and t2:

[screenshot: normals rendered as colors at t1 and t2]

My vertex structure:

struct MyVertex{//vertex structure
    MyVertex() : weightCount(0), normal(0,0,0){
        //textureCoordinates.x = 1;
        //textureCoordinates.y = 1;
    }
    MyVertex(float x, float y, float z, float u, float v, float nx, float ny, float nz)
        : position(x, y, z), textureCoordinates(u, v), normal(0,0,0), weightCount(0){
    }

    DirectX::XMFLOAT3 position;
    DirectX::XMFLOAT2 textureCoordinates;
    DirectX::XMFLOAT3 normal = DirectX::XMFLOAT3(1.0f, 0.0f, 0.0f);

    //will not be sent to shader (and used only by skinned models)
    int startWeightIndex;
    int weightCount; //=0 means that it's not skinned vertex
};

The corresponding vertex layout:

layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 20, D3D11_INPUT_PER_VERTEX_DATA, 0 };

Vertex buffer:

D3D11_BUFFER_DESC bd;
ZeroMemory(&bd, sizeof(bd));
bd.Usage = D3D11_USAGE_DEFAULT; //D3D11_USAGE_DYNAMIC
bd.ByteWidth = sizeof(MyVertex) * structure->getVerticesCount();
bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bd.CPUAccessFlags = 0;
D3D11_SUBRESOURCE_DATA InitData;
ZeroMemory(&InitData, sizeof(InitData));
InitData.pSysMem = structure->vertices;
if(device->CreateBuffer(&bd, &InitData, &buffers->vertexBuffer) != S_OK){
    return false;
}

And the shader that outputs the normals "as color" (of course, if I set output.normal to float3(1,1,1), the objects stay white):

struct Light
{
    float3 diffuse;
    float3 position;
    float3 direction;
};

cbuffer cbPerObject : register(b0)
{
    float4x4 WVP;
    float4x4 World;

    float4 difColor;
    bool hasTexture;
    bool hasNormMap;
};

cbuffer cbPerFrame : register(b1)
{
    Light light;
};


Texture2D ObjTexture;
Texture2D ObjNormMap;
SamplerState ObjSamplerState;
TextureCube SkyMap;

struct VS_INPUT
{
    float4 position : POSITION;
    float2 tex : TEXCOORD;
    float3 normal : NORMAL;
};

struct VS_OUTPUT
{
    float4 Pos : SV_POSITION;
    float4 worldPos : POSITION;
    float3 normal : NORMAL;
    float2 TexCoord : TEXCOORD;
    float3 tangent : TANGENT;
};

VS_OUTPUT VS(VS_INPUT input)
{
    VS_OUTPUT output;
    //input.position.w = 1.0f;
    output.Pos = mul(input.position, WVP);
    output.worldPos = mul(input.position, World);
    output.normal = input.normal;
    output.tangent = mul(input.tangent, World);
    output.TexCoord = input.tex;
    return output;
}

float4 PS(VS_OUTPUT input) : SV_TARGET
{
    return float4(input.normal, 1.0);
}
//--------------------------------------------------------------------------------------
// Techniques
//--------------------------------------------------------------------------------------
technique10 RENDER
{
    pass P0
    {
        SetVertexShader( CompileShader( vs_4_0, VS() ) );   
        SetPixelShader( CompileShader( ps_4_0, PS() ) );
        SetBlendState( SrcAlphaBlendingAdd, float4( 0.0f, 0.0f, 0.0f, 0.0f ), 0xFFFFFFFF );
    }    
}

Where have I made a mistake? Maybe there are other places in the code that could cause that strange behavior (some locking, buffers, I don't know...)?


edit:

As 413X suggested, I ran the DirectX graphics diagnostics:

[screenshot: graphics diagnostics frame capture]

What is strange is that in the small preview the screen looks the same as in the program. But when I investigate that frame (screenshot), I get completely different colors:

[screenshot: the investigated frame, showing completely different colors]

Also, here's something strange - I pick the blue pixel and it says it's black (on the right):

[screenshot: pixel picker reporting black for a blue pixel]


edit 2:

As catflier requested, I'm posting some additional code.

The rendering and buffer binding:

//set the object world matrix
DirectX::XMMATRIX objectWorldMatrix = DirectX::XMMatrixIdentity();
DirectX::XMMATRIX rotationMatrix = DirectX::XMMatrixRotationQuaternion(
    DirectX::XMVectorSet(object->getOrientation().getX(), object->getOrientation().getY(), object->getOrientation().getZ(), object->getOrientation().getW())
);
DirectX::XMMATRIX scaleMatrix = (
    object->usesScaleMatrix()  
    ? DirectX::XMMatrixScaling(object->getHalfSize().getX(), object->getHalfSize().getY(), object->getHalfSize().getZ())
    : DirectX::XMMatrixScaling(1.0f, 1.0f, 1.0f)
);
DirectX::XMMATRIX translationMatrix = DirectX::XMMatrixTranslation(object->getPosition().getX(), object->getPosition().getY(), object->getPosition().getZ());
objectWorldMatrix = scaleMatrix * rotationMatrix * translationMatrix;

UINT stride = sizeof(MyVertex);
UINT offset = 0;
context->IASetVertexBuffers(0, 1, &buffers->vertexBuffer, &stride, &offset); //set vertex buffer
context->IASetIndexBuffer(buffers->indexBuffer, DXGI_FORMAT_R16_UINT, 0); //set index buffer

//set the constants per object
ConstantBufferStructure constantsPerObject;

//set matrices
DirectX::XMFLOAT4X4 view = myCamera->getView();
DirectX::XMMATRIX camView = XMLoadFloat4x4(&view);
DirectX::XMFLOAT4X4 projection = myCamera->getProjection();
DirectX::XMMATRIX camProjection = XMLoadFloat4x4(&projection);
DirectX::XMMATRIX worldViewProjectionMatrix = objectWorldMatrix * camView * camProjection;

constantsPerObject.worldViewProjection = XMMatrixTranspose(worldViewProjectionMatrix);
constantsPerObject.world = XMMatrixTranspose(objectWorldMatrix);

//draw the object's non-transparent subsets
for(int i=0; i<structure->subsets.size(); i++){
    
    setColorsAndTextures(structure->subsets[i], constantsPerObject, context); //custom method that insert data into constantsPerObject variable

    //bind constants per object to constant buffer and send it to vertex and pixel shaders
    context->UpdateSubresource(constantBuffer, 0, NULL, &constantsPerObject, 0, 0);
    context->VSSetConstantBuffers(0, 1, &constantBuffer);
    context->PSSetConstantBuffers(0, 1, &constantBuffer);

    context->RSSetState(RSCullDefault);
    int start = structure->subsets[i]->getVertexIndexStart();
    int count = structure->subsets[i]->getVertexIndexAmmount();
    context->DrawIndexed(count, start, 0);
}

The rasterizer:

void RendererDX::initCull(ID3D11Device * device){
    D3D11_RASTERIZER_DESC cmdesc;
    ZeroMemory(&cmdesc, sizeof(D3D11_RASTERIZER_DESC));
    cmdesc.FillMode = D3D11_FILL_SOLID;
    cmdesc.CullMode = D3D11_CULL_BACK;
#ifdef GRAPHIC_LEFT_HANDED
    cmdesc.FrontCounterClockwise = true;
#else
    cmdesc.FrontCounterClockwise = false;
#endif

    cmdesc.CullMode = D3D11_CULL_NONE;
    //cmdesc.FillMode = D3D11_FILL_WIREFRAME;
    HRESULT hr = device->CreateRasterizerState(&cmdesc, &RSCullDefault);
}

edit 3:

The debugger output (are there some mismatches in the semantics?):

D3D11 ERROR: ID3D11DeviceContext::DrawIndexed: Input Assembler - Vertex Shader linkage error: Signatures between stages are incompatible. The input stage requires Semantic/Index (NORMAL,0) as input, but it is not provided by the output stage. [ EXECUTION ERROR #342: DEVICE_SHADER_LINKAGE_SEMANTICNAME_NOT_FOUND]

D3D11 ERROR: ID3D11DeviceContext::DrawIndexed: Input Assembler - Vertex Shader linkage error: Signatures between stages are incompatible. Semantic 'TEXCOORD' is defined for mismatched hardware registers between the output stage and input stage. [ EXECUTION ERROR #343: DEVICE_SHADER_LINKAGE_REGISTERINDEX]

D3D11 ERROR: ID3D11DeviceContext::DrawIndexed: Input Assembler - Vertex Shader linkage error: Signatures between stages are incompatible. Semantic 'TEXCOORD' in each signature have different min precision levels, when they must bet identical. [ EXECUTION ERROR #3146050: DEVICE_SHADER_LINKAGE_MINPRECISION]


There are 2 answers

Answer by mrvux (8 votes):

Your data structure on upload doesn't match your Input Layout declaration.

Since your vertex data structure is:

struct MyVertex{//vertex structure
    MyVertex() : weightCount(0), normal(0,0,0){
        //textureCoordinates.x = 1;
        //textureCoordinates.y = 1;
    }
    MyVertex(float x, float y, float z, float u, float v, float nx, float ny, float nz)
        : position(x, y, z), textureCoordinates(u, v), normal(0,0,0), weightCount(0){
    }

    DirectX::XMFLOAT3 position;
    DirectX::XMFLOAT2 textureCoordinates;
    DirectX::XMFLOAT3 normal = DirectX::XMFLOAT3(1.0f, 0.0f, 0.0f);

    //will not be sent to shader (and used only by skinned models)
    int startWeightIndex;
    int weightCount; //=0 means that it's not skinned vertex
};

startWeightIndex and weightCount will be copied into your vertex buffer (even if they do not contain anything useful).

If you check sizeof(MyVertex), you will get a size of 40 bytes.

Now let's look at your input layout declaration (whether you use automatic offset or not is irrelevant):

layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 20, D3D11_INPUT_PER_VERTEX_DATA, 0 };

From what you see here, you are declaring a data structure of (12+8+12) = 32 bytes, which of course does not match your vertex size.

So the first vertex will be fetched properly, but the next ones will start to use invalid data (as the Input Assembler doesn't know that your data structure is bigger than what you specified to it).
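As a quick sanity check, you can compare the structure's real layout against the declared offsets at compile time. A minimal sketch, assuming the MyVertex structure from the question is in scope:

#include <cstddef> //offsetof

//compile-time checks: the element offsets match the input layout above,
//but the real stride is 40 bytes, not the 32 covered by the three elements
static_assert(offsetof(MyVertex, position) == 0, "POSITION offset mismatch");
static_assert(offsetof(MyVertex, textureCoordinates) == 12, "TEXCOORD offset mismatch");
static_assert(offsetof(MyVertex, normal) == 20, "NORMAL offset mismatch");
static_assert(sizeof(MyVertex) == 40, "startWeightIndex/weightCount add 8 trailing bytes");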

Two ways to fix it:

1/ Strip your vertex declaration

In that case you modify your vertex structure to match your input declaration (I removed the constructors for brevity):

struct MyVertex
{//vertex structure
    DirectX::XMFLOAT3 position;
    DirectX::XMFLOAT2 textureCoordinates;
    DirectX::XMFLOAT3 normal = DirectX::XMFLOAT3(1.0f, 0.0f, 0.0f);
};

Now your vertex structure exactly matches your declaration, so vertices will be fetched properly.
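If the skinning data is still needed on the CPU side, it can live outside the vertex stream instead; a minimal sketch with a hypothetical SkinInfo helper, reusing structure->getVerticesCount() from the question:

#include <vector>

//skinning attributes kept out of the GPU vertex buffer (hypothetical helper)
struct SkinInfo {
    int startWeightIndex;
    int weightCount; //=0 means that it's not a skinned vertex
};

//one entry per vertex, parallel to the MyVertex array
std::vector<SkinInfo> skinInfo(structure->getVerticesCount());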

2/ Adapt your Input Layout declaration:

In that case you change your layout to make sure that all the data contained in your buffer is declared, so it can be taken into account by the Input Assembler (see below).

Now your declaration becomes:

layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 20, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[3] = { "STARTWEIGHTINDEX", 0, DXGI_FORMAT_R32_SINT, 0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[4] = { "WEIGHTCOUNT", 0, DXGI_FORMAT_R32_SINT, 0, 36, D3D11_INPUT_PER_VERTEX_DATA, 0 };

That means you inform the Input Assembler of all the data that your structure contains.

In that case, even though the data is not needed by your Vertex Shader, since you specified a full data declaration, the Input Assembler will safely ignore STARTWEIGHTINDEX and WEIGHTCOUNT, but will respect your whole structure's padding.
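For reference, creating the five-element layout could look like the sketch below; vsBytecode and vsSize are placeholder names for the compiled vertex shader blob and its size:

D3D11_INPUT_ELEMENT_DESC layout[5] = {
    { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 20, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "STARTWEIGHTINDEX", 0, DXGI_FORMAT_R32_SINT, 0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "WEIGHTCOUNT", 0, DXGI_FORMAT_R32_SINT, 0, 36, D3D11_INPUT_PER_VERTEX_DATA, 0 },
};
ID3D11InputLayout* inputLayout = nullptr;
//the layout is validated against the shader signature; extra elements that the
//shader does not read (STARTWEIGHTINDEX, WEIGHTCOUNT) are allowed and ignored
HRESULT hr = device->CreateInputLayout(layout, 5, vsBytecode, vsSize, &inputLayout);
if(FAILED(hr)){
    return false;
}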

Answer by Alex (7 votes):

I am pretty sure your bytes are misaligned. A float is 4 bytes, I think, so a float4 is 16 bytes. And it wants to be 16-byte aligned. So observe:

layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 20, D3D11_INPUT_PER_VERTEX_DATA, 0 };

The values 0, 12, 20 (AlignedByteOffset) are where each element starts. That would mean: Position starts at 0, and Texcoord starts at the end of a float3, which gives you wrong results. Because look inside the shader:

struct VS_INPUT
{
    float4 position : POSITION;
    float2 tex : TEXCOORD;
    float3 normal : NORMAL;
};

And Normal starts at float3+float2. So generally, you want to align things more consistently. Maybe even add "padding" to fill the spaces and keep all the variables 16-byte aligned.

But to keep it simpler for you, you want to switch that statement to:

layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 };

What happens now? Well, the thing aligns itself automagically; however, it can be less optimal. But one thing about shaders: try to keep things 16-byte aligned.
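For what it's worth, here is a minimal sketch of the offsets the runtime computes for those appended elements, assuming the three-element layout above:

//D3D11_APPEND_ALIGNED_ELEMENT places each element right after the previous one
const UINT posOffset = 0;               //POSITION, DXGI_FORMAT_R32G32B32_FLOAT, 12 bytes
const UINT texOffset = posOffset + 12;  //TEXCOORD resolves to 12, same as the explicit value
const UINT nrmOffset = texOffset + 8;   //NORMAL resolves to 20, same as the explicit value
//the vertex stride itself still comes from the value passed to IASetVertexBuffers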