
I'm having a bit of trouble with Blender's Wavefront exporter.

I'm developing a parser for .obj meshes. I created a mesh in Blender and set up its UVs, normals and so on. When I load this mesh with my loader, the UVs are placed correctly but the normals are not: they are all flipped!
If I flip all the normals in Blender, the mesh displays correctly in my app. The strange thing is that if I create a simple cube, export it, load it and display it in my app, it displays correctly without my having to "badly" flip it in Blender.
I experimented a lot with the exporter's parameters, and I set my mesh to smooth shading so that I'm sure I get per-vertex normals.
I tried turning backface culling on and off; in fact I tried a lot of things I found on the Internet, without result.
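
I could probably also work around it by negating every normal as I parse it, though that would just hide the problem (and Blender's "Flip Normals" also reverses the face winding, so it may not be strictly equivalent). A minimal sketch of that idea, using the same vn case as in the parser below:

//workaround idea: negate each parsed normal
case "vn":
    ListeNormales.Add(new VPrimVector3(-Double.Parse(l_words[1]), -Double.Parse(l_words[2]), -Double.Parse(l_words[3])));
    break;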

Here is a bit of my code:

//I add information about vertices / UVs / normals in 3 collections

//vertex
case "v":
    l_vertex = new VPrimVertex(new VPrimVector3(Double.Parse(l_words[1]), Double.Parse(l_words[2]), Double.Parse(l_words[3])));
    ListeVertices.Add(l_vertex);
    break;
//vertex normal
case "vn":
    ListeNormales.Add(new VPrimVector3(Double.Parse(l_words[1]), Double.Parse(l_words[2]), Double.Parse(l_words[3])));
    break;
//vertex UV
case "vt":
    //UV orientation differs between Wavefront and WPF, hence the 1 - V
    ListeUVs.Add(new VPrimPointUV(Double.Parse(l_words[1]), 1.0-Double.Parse(l_words[2])));
    break;
//face
case "f":
    //not a triangle
    if (l_words.Length > 4)
    {
        Triangule(l_words);
    }
    //triangle
    else
    {
        ComputeFace(l_words);
    }
    break;

. . .
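
Triangule is not shown; it just splits an n-gon face into a triangle fan around the first vertex and feeds each triangle to ComputeFace (a minimal sketch, assuming convex faces with vertices listed in order):

//fan triangulation: split an n-gon face into triangles around the first vertex
//p_words[0] is "f", p_words[1..] are the vertex tokens (v/vt/vn)
private void Triangule(string[] p_words)
{
    for (int i = 2; i < p_words.Length - 1; i++)
    {
        ComputeFace(new string[] { "f", p_words[1], p_words[i], p_words[i + 1] });
    }
}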

ComputeFace(){
...
//for each face:
    //for each vertex of a face:
    //p_face[i] contains strings like v1/vt1/vn1

    l_stringVertex = p_face[i].Split('/');
    l_vertex = ListeVertices.ElementAt(int.Parse(l_stringVertex[0]) - 1);
    l_vertex.AddAdjacent(l_face);
    l_vertex.Normale = ListeNormales.ElementAt(int.Parse(l_stringVertex[2]) - 1);

    l_face.AddVertex(l_vertex);
    l_face.UVList.Add(ListeUVs.ElementAt(int.Parse(l_stringVertex[1]) - 1));
    ...
}
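
One detail I'm not sure matters here: .obj indices are 1-based and may also be negative (relative to the end of the list so far). My parser only handles the positive case; a small helper that would cover both (ResolveIndex is a hypothetical name):

//resolve a 1-based OBJ index; negative values count back from the end of the list
private static int ResolveIndex(string p_token, int p_count)
{
    int l_index = int.Parse(p_token);
    return l_index > 0 ? l_index - 1 : p_count + l_index;
}

//e.g. l_vertex = ListeVertices.ElementAt(ResolveIndex(l_stringVertex[0], ListeVertices.Count));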

And then I fill the WPF MeshGeometry3D with all of that data.

private void Finalise()
    {
        Point3D l_point3D;
        Vector3D l_vector;
        Point l_point;
        VPrimPointUV l_pointUV;
        int i;

        foreach (VPrimFace l_face in Racine.SubdivideList)
        {
            i = 0;
            foreach (VPrimVertex l_vertex in l_face.VertexList)
            {
                l_point3D = new Point3D(l_vertex.Position.X, l_vertex.Position.Y, l_vertex.Position.Z);
                CurrentMesh.MeshGeometry3D.Positions.Add(l_point3D);

                //average the normals of the adjacent faces
                l_vertex.moyenneNormaleFacesAdjacentes();
                l_vector = new Vector3D(l_vertex.Normale.X, l_vertex.Normale.Y, l_vertex.Normale.Z);
                CurrentMesh.MeshGeometry3D.Normals.Add(l_vector);

                if (Texture)
                {
                    l_pointUV = l_face.UVList.ElementAt(i);
                    l_point = new Point(l_pointUV.U, l_pointUV.V);
                    CurrentMesh.MeshGeometry3D.TextureCoordinates.Add(l_point);
                }

                CurrentMesh.MeshGeometry3D.TriangleIndices.Add(CurrentMesh.MeshGeometry3D.Positions.Count-1);

                i += 1;
            }
        }
    }
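
To tell whether it's the winding or the normals that are wrong, I'm thinking of running a sanity check like the one below after Finalise (a sketch; CheckNormals is just a debug helper of mine, and it relies on WPF treating counter-clockwise triangles as front-facing):

private void CheckNormals(MeshGeometry3D p_mesh)
{
    //for each triangle, compare the geometric normal (from the winding)
    //with the stored normal; a negative dot product means they disagree
    for (int i = 0; i + 2 < p_mesh.TriangleIndices.Count; i += 3)
    {
        Point3D l_a = p_mesh.Positions[p_mesh.TriangleIndices[i]];
        Point3D l_b = p_mesh.Positions[p_mesh.TriangleIndices[i + 1]];
        Point3D l_c = p_mesh.Positions[p_mesh.TriangleIndices[i + 2]];
        Vector3D l_geometric = Vector3D.CrossProduct(l_b - l_a, l_c - l_a);
        Vector3D l_stored = p_mesh.Normals[p_mesh.TriangleIndices[i]];
        if (Vector3D.DotProduct(l_geometric, l_stored) < 0)
            Console.WriteLine("Triangle " + (i / 3) + " disagrees with its normal");
    }
}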

If you have any idea... Thank you!

I'm on Blender 2.66.
