0

I have started using the "instanced model" technique to draw my scene, and I have a problem with the vertex normals.

I pass the matrix (rotation / scale / position) to HLSL to draw each instance of the model, but I can't obtain correct normals after rotating them.

The 3D transformation of my model works fine after the matrix is applied, but the lighting looks really strange depending on the normal orientation.

// Per-vertex input streamed from the model's vertex buffer.
struct InstancingVSinput
{
    float4 Position : SV_POSITION0;   // object-space position
    float4 Norm : NORMAL0;            // object-space normal
    float2 TexCoord : TEXCOORD0;      // base texture coordinates
};

// Data interpolated from the vertex shader to the pixel shader.
struct InstancingVSoutput
{
    float4 Position : POSITION0;   // clip-space position
    float3 Norm : NORMAL0;         // rotated normal used for lighting
    float2 TexCoord : TEXCOORD0;   // atlas-adjusted texture coordinates
};

    // Instancing vertex shader. Per-vertex data arrives in `input`;
    // per-instance data arrives through the POSITION1/TEXCOORD1 and
    // POSITION2..POSITION5 semantics (instance position, atlas cell,
    // and the four rows of the instance transform).
    InstancingVSoutput InstancingVS(InstancingVSinput input, 
                                    float4 InstPos : POSITION1, float4 InstTexCoord : TEXCOORD1,
                                    float4 Mat1 : POSITION2, float4 Mat2 : POSITION3, float4 Mat3 : POSITION4, float4 Mat4 : POSITION5)
    {
        InstancingVSoutput output;

        // Rebuild the per-instance transform from its four row vectors.
        float4x4 O = float4x4(Mat1, Mat2, Mat3, Mat4);

        // NOTE(review): this result is dead — `pos` is immediately
        // overwritten on the next line, so the xWorld multiply has no effect.
        float4 pos = mul(input.Position,xWorld);
        pos = mul(input.Position, O);
        pos = InstPos + pos;

        // NOTE(review): transposing alone is only correct for pure rotations.
        // If O contains (non-uniform) scaling, the normal must be multiplied
        // by the inverse transpose of O, and re-normalized afterwards.
        O = transpose(O);

        float3 norm = normalize((float3)input.Norm);
        norm = mul(norm, (float3x3)O);

        // Project the instanced position into clip space.
        pos = mul(pos, WVP);

        output.Position = pos;
        output.Norm = norm;

        // Remap the vertex UVs into one quadrant of a 2x2 texture atlas,
        // the quadrant being selected by InstTexCoord.
        output.TexCoord = float2((input.TexCoord.x / 2.0f) + (1.0f / 2.0f * InstTexCoord.x),
                                 (input.TexCoord.y / 2.0f) + (1.0f / 2.0f * InstTexCoord.y));
        return output;
    }

    // Pixel shader: sample the texture and modulate by a fixed-direction light.
    float4 InstancingPS(InstancingVSoutput input) : COLOR0
    {
        float4 C = tex2D(TextureSampler, input.TexCoord);
        float3 N = input.Norm;
        // NOTE(review): the light vector (0,-1,-1) is not normalized and the
        // dot product is not clamped, so it can scale the colour by more than 1
        // or go negative for back-facing normals.
        C.rgb *= dot(float3(0, -1, -1), N);
        return C;
    }

Could you suggest a correct way to recover my normals after rotation?

Thanks

Chris

4 Answers

0

If your instance matrix contains any scaling, you need to normalize the normals after the multiplication. Additionally, if the scaling is not uniform, you must multiply the input normals by the inverse transpose of the matrix. Currently you are only applying the transpose, but not the inverse.

Calculating the inverse of a matrix is an expensive operation, though, so consider precomputing it outside the shader.

Quinchilion
  • 912
  • 6
  • 16
0

Here are the lines I've modified to implement your solution:

1 - Prepare the instance properties; just add a new one for the inverse transpose matrix (precomputed)

        // Random rotation about each axis (rnd.Next(6) is 0..5, so each angle,
        // in radians, falls in [-2, 3]).
        Matrix MatRot = Matrix.CreateRotationX(3 - rnd.Next(6)) * 
                        Matrix.CreateRotationY(3 - rnd.Next(6)) * 
                        Matrix.CreateRotationZ(3 - rnd.Next(6));
        // Per-instance geometry transform: random uniform scale, then rotation.
        instances[i].Geometry = Matrix.CreateScale(0.5f + rnd.Next(2)) * MatRot;
        // Precomputed inverse transpose for transforming normals in the shader.
        // NOTE(review): computed from MatRot only; because the scale here is
        // uniform that is equivalent, but a non-uniform scale would require the
        // inverse transpose of the full Geometry matrix.
        instances[i].Inverse = Matrix.Transpose(Matrix.Invert(MatRot));

2 - The shader receives this new matrix and uses it on each model's normals

// Shader constants set from the application.
float4x4 WVP;        // combined world * view * projection matrix
float4x4 WV;         // world * view matrix (unused in this technique)
float4x4 xLightView; // light's view matrix (unused in this technique)
float4x4 xLightProj; // light's projection matrix (unused in this technique)
float4x4 xWorld;     // world matrix

texture cubeTexture;
// Trilinear-filtered sampler bound to cubeTexture.
sampler TextureSampler = sampler_state
{
    texture = <cubeTexture>;
    mipfilter = LINEAR;
    minfilter = LINEAR;
    magfilter = LINEAR;
};

// Per-vertex input streamed from the model's vertex buffer.
struct InstancingVSinput
{
    float4 Position : SV_POSITION;   // object-space position
    float4 Norm : NORMAL0;           // object-space normal
    float2 TexCoord : TEXCOORD0;     // base texture coordinates
};

// Data interpolated from the vertex shader to the pixel shader.
struct InstancingVSoutput
{
    float4 Position : POSITION0;   // clip-space position
    float4 Norm : NORMAL0;         // transformed normal used for lighting
    float2 TexCoord : TEXCOORD0;   // atlas-adjusted texture coordinates
};

// Instancing vertex shader, revised per the answer: it now also receives the
// precomputed inverse transpose of the instance transform (rows Mat5..Mat8)
// and uses it to transform the normals.
InstancingVSoutput InstancingVS(InstancingVSinput input, 
                                float4 InstPos : POSITION1, float4 InstTexCoord : TEXCOORD1,
                                float4 Mat1 : POSITION2, float4 Mat2 : POSITION3, float4 Mat3 : POSITION4, float4 Mat4 : POSITION5,
                                float4 Mat5 : POSITION6, float4 Mat6 : POSITION7, float4 Mat7 : POSITION8, float4 Mat8 : POSITION9)
{
    InstancingVSoutput output;

    // O = instance transform; I = its precomputed inverse transpose.
    float4x4 O = float4x4(Mat1, Mat2, Mat3, Mat4);
    float4x4 I = float4x4(Mat5, Mat6, Mat7, Mat8);

    // NOTE(review): dead statement — `pos` is overwritten on the next line,
    // so the xWorld multiply has no effect.
    float4 pos = mul(input.Position,xWorld);
    pos = mul(input.Position, O);
    pos = InstPos + pos;

    // Project the instanced position into clip space.
    pos = mul(pos, WVP);
    output.Position = pos;

    // Transform the normal by the inverse transpose and re-normalize.
    // NOTE(review): input.Norm is a float4; if its w component is not 0, the
    // translation row of I leaks into the result — verify the vertex data.
    output.Norm = normalize(mul(input.Norm,I));

    // Remap the vertex UVs into one quadrant of a 2x2 texture atlas,
    // the quadrant being selected by InstTexCoord.
    output.TexCoord = float2((input.TexCoord.x / 2.0f) + (1.0f / 2.0f * InstTexCoord.x),
                             (input.TexCoord.y / 2.0f) + (1.0f / 2.0f * InstTexCoord.y));

    return output;
}

// Pixel shader: texture colour modulated by a fixed directional light.
float4 InstancingPS(InstancingVSoutput input) : COLOR0
{
    float4 C = tex2D(TextureSampler, input.TexCoord);

    // NOTE(review): input.Norm is a float4 dotted with a float3, which
    // implicitly truncates to xyz; the result is also not clamped, so
    // back-facing normals produce negative colour values.
    C.rgb *= dot(input.Norm , normalize(float3(0, -1, -1)));
    return C;
}

// Single-pass technique compiled against shader model 5.0.
technique Instancing
{
    pass Pass0
    {
        VertexShader = compile vs_5_0 InstancingVS();
        PixelShader = compile ps_5_0 InstancingPS();
    }
}

An idea ?

0

As you can see, my faces do not receive correct light exposure for their normal orientation.

Click to see picture

0

The problem is solved; it's OK now. In fact, the HLSL code was fine — the problem persisted because of an incorrect computation of the vertex normals themselves. Now my faces are lit with the correct light direction.