Example 1
        public SSMesh_wfOBJ(SSAssetManager.Context ctx, string filename)
        {
            this.srcFilename = ctx.fullResourcePath(filename);


            Console.WriteLine("SSMesh_wfOBJ: loading wff {0}", filename);
            WavefrontObjLoader wff_data = new WavefrontObjLoader(ctx, filename);

            Console.WriteLine("wff vertex count = {0}", wff_data.positions.Count);
            Console.WriteLine("wff face count = {0}", wff_data.numFaces);

            _loadData(ctx, wff_data);

            // update radius
            float maxRadSq = 0f;

            foreach (var subset in geometrySubsets)
            {
                foreach (var vtx in subset.triangleMesh.lastAssignedVertices)
                {
                    maxRadSq = Math.Max(maxRadSq, vtx.Position.LengthSquared);
                }
            }
            _boundingSphereRadius = (float)Math.Sqrt(maxRadSq);
        }
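The radius pass above avoids a square root per vertex: it tracks the maximum squared length and takes a single Math.Sqrt at the end. A minimal standalone sketch of the same idea, assuming OpenTK's Vector3 (the helper name is hypothetical, not part of the library):

        // Hypothetical helper: bounding-sphere radius about the origin.
        // Compares squared lengths in the loop and defers the sqrt to the end.
        static float ComputeBoundingSphereRadius(System.Collections.Generic.IEnumerable<OpenTK.Vector3> positions)
        {
            float maxRadSq = 0f;
            foreach (var p in positions) {
                maxRadSq = System.Math.Max(maxRadSq, p.LengthSquared);
            }
            return (float)System.Math.Sqrt(maxRadSq);
        }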
Example 2
        private SSMeshOBJSubsetData _loadMaterialSubset(SSAssetManager.Context ctx, WavefrontObjLoader wff,
                                                        WavefrontObjLoader.MaterialInfoWithFaces objMatSubset)
        {
            // generate renderable geometry data...
            SSVertex_PosNormTexDiff[] vertices;
            UInt16[] triIndices, wireframeIndices;
            VertexSoup_VertexFormatBinder.generateDrawIndexBuffer(
                wff, out triIndices, out vertices);
            wireframeIndices = OpenTKHelper.generateLineIndicies(triIndices);
            SSMeshOBJSubsetData subsetData = new SSMeshOBJSubsetData(
                vertices, triIndices, wireframeIndices);

            // setup the material...
            // load and link every texture present
            subsetData.TextureMaterial = new SSTextureMaterial();
            if (objMatSubset.mtl.diffuseTextureResourceName != null)
            {
                subsetData.TextureMaterial.diffuseTex = SSAssetManager.GetInstance <SSTexture>(ctx, objMatSubset.mtl.diffuseTextureResourceName);
            }
            if (objMatSubset.mtl.ambientTextureResourceName != null)
            {
                subsetData.TextureMaterial.ambientTex = SSAssetManager.GetInstance <SSTexture>(ctx, objMatSubset.mtl.ambientTextureResourceName);
            }
            if (objMatSubset.mtl.bumpTextureResourceName != null)
            {
                subsetData.TextureMaterial.bumpMapTex = SSAssetManager.GetInstance <SSTexture>(ctx, objMatSubset.mtl.bumpTextureResourceName);
            }
            if (objMatSubset.mtl.specularTextureResourceName != null)
            {
                subsetData.TextureMaterial.specularTex = SSAssetManager.GetInstance <SSTexture>(ctx, objMatSubset.mtl.specularTextureResourceName);
            }
            return(subsetData);
        }
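The four null checks above repeat the same GetInstance<SSTexture> call for each optional MTL texture slot. One way to condense the pattern, sketched here with a hypothetical helper name (SSAssetManager.GetInstance<SSTexture> is used exactly as in the example):

        // Hypothetical helper: load a texture through the asset manager, or return null
        // when the MTL entry does not name a resource.
        private static SSTexture loadTexOrNull(SSAssetManager.Context ctx, string resourceName)
        {
            return resourceName != null
                ? SSAssetManager.GetInstance<SSTexture>(ctx, resourceName)
                : null;
        }

with usage such as subsetData.TextureMaterial.diffuseTex = loadTexOrNull(ctx, objMatSubset.mtl.diffuseTextureResourceName);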
        private Mesh LoadModel(string name, string path, bool normaliseAndCenterise)
        {
            MeshVertex[] vertices;

            WavefrontObjLoader.FlattenIndices(path, out vertices, normaliseAndCenterise);

            return(new Mesh(name, vertices));
        }
Example 4
 private void _loadData(string baseDirectory, WavefrontObjLoader m)
 {
     foreach (var srcmat in m.materials)
     {
         if (srcmat.faces.Count != 0)
         {
             this.geometrySubsets.Add(_loadMaterialSubset(baseDirectory, m, srcmat));
         }
     }
 }
 private void _loadData(SSAssetManager.Context ctx, WavefrontObjLoader m)
 {
     foreach (var srcmat in m.materials)
     {
         if (srcmat.faces.Count != 0)
         {
             this.geometrySubsets.Add(_loadMaterialSubset(ctx, m, srcmat));
         }
     }
 }
        public SSMesh_wfOBJ(SSAssetManager.Context ctx, string filename)
        {
            this.srcFilename = filename;
            this.ctx         = ctx;


            Console.WriteLine("SSMesh_wfOBJ: loading wff {0}", filename);
            WavefrontObjLoader wff_data = new WavefrontObjLoader(ctx, filename);

            Console.WriteLine("wff vertex count = {0}", wff_data.positions.Count);
            Console.WriteLine("wff face count = {0}", wff_data.numFaces);

            _loadData(ctx, wff_data);
        }
Example 7
        private SSMeshOBJSubsetData _loadMaterialSubset(string baseDirectory, WavefrontObjLoader wff,
                                                        WavefrontObjLoader.MaterialInfoWithFaces objMatSubset)
        {
            // generate renderable geometry data...
            SSVertex_PosNormTex[] vertices;
            UInt16[] triIndices, wireframeIndices;
            VertexSoup_VertexFormatBinder.generateDrawIndexBuffer(
                wff, objMatSubset, out triIndices, out vertices);
            wireframeIndices = OpenTKHelper.generateLineIndicies(triIndices);
            SSMeshOBJSubsetData subsetData = new SSMeshOBJSubsetData(
                vertices, triIndices, wireframeIndices);

            // setup the material...
            // load and link every texture present
            subsetData.textureMaterial = SSTextureMaterial.FromBlenderMtl(baseDirectory, objMatSubset.mtl);
            subsetData.colorMaterial   = SSColorMaterial.fromMtl(objMatSubset.mtl);
            return(subsetData);
        }
        private SSMeshOBJSubsetData _loadMaterialSubset(SSAssetManager.Context ctx, WavefrontObjLoader wff,
                                                        WavefrontObjLoader.MaterialInfoWithFaces objMatSubset)
        {
            // create new mesh subset-data
            SSMeshOBJSubsetData subsetData = new SSMeshOBJSubsetData();

            // setup the material...

            // load and link every texture present
            subsetData.TextureMaterial = new SSTextureMaterial();
            if (objMatSubset.mtl.diffuseTextureResourceName != null)
            {
                subsetData.TextureMaterial.diffuseTex = SSAssetManager.GetInstance <SSTexture>(ctx, objMatSubset.mtl.diffuseTextureResourceName);
            }
            if (objMatSubset.mtl.ambientTextureResourceName != null)
            {
                subsetData.TextureMaterial.ambientTex = SSAssetManager.GetInstance <SSTexture>(ctx, objMatSubset.mtl.ambientTextureResourceName);
            }
            if (objMatSubset.mtl.bumpTextureResourceName != null)
            {
                subsetData.TextureMaterial.bumpMapTex = SSAssetManager.GetInstance <SSTexture>(ctx, objMatSubset.mtl.bumpTextureResourceName);
            }
            if (objMatSubset.mtl.specularTextureResourceName != null)
            {
                subsetData.TextureMaterial.specularTex = SSAssetManager.GetInstance <SSTexture>(ctx, objMatSubset.mtl.specularTextureResourceName);
            }

            // generate renderable geometry data...
            VertexSoup_VertexFormatBinder.generateDrawIndexBuffer(wff, out subsetData.indicies, out subsetData.vertices);

            // TODO: setup VBO/IBO buffers
            // http://www.opentk.com/doc/graphics/geometry/vertex-buffer-objects

            subsetData.wireframe_indicies = OpenTKHelper.generateLineIndicies(subsetData.indicies);

            subsetData.vbo           = new SSVertexBuffer <SSVertex_PosNormTexDiff>(subsetData.vertices);
            subsetData.ibo           = new SSIndexBuffer(subsetData.indicies, subsetData.vbo);
            subsetData.ibo_wireframe = new SSIndexBuffer(subsetData.wireframe_indicies, subsetData.vbo);

            return(subsetData);
        }
Example 9
        double keyDeltaFactor   = 4; // angle delta applied when a direction key is pressed

        public MainWindow()
        {
            InitializeComponent();

            WavefrontObjLoader wfl = new WavefrontObjLoader();

            slider1.ValueChanged += new RoutedPropertyChangedEventHandler <double>(slider1_ValueChanged);
            slider2.ValueChanged += new RoutedPropertyChangedEventHandler <double>(slider1_ValueChanged);
            slider3.ValueChanged += new RoutedPropertyChangedEventHandler <double>(slider1_ValueChanged);

            // Specify where in the 3D scene the camera is.
            camera.Position = new Point3D(0, 0, 0);

            // Specify the direction that the camera is pointing.
            camera.LookDirection = new Vector3D(0, 0, -1);

            // Define camera's horizontal field of view in degrees.
            camera.FieldOfView = 1000;

            // Assign the camera to the viewport
            vp.Camera = camera;

            Model3DGroup myModel3DGroup = new Model3DGroup();

            DirectionalLight myDirectionalLight = new DirectionalLight();

            myDirectionalLight.Color     = Colors.White;
            myDirectionalLight.Direction = new Vector3D(-0.61, -0.5, -0.61);

            myModel3DGroup.Children.Add(myDirectionalLight);
            var m = wfl.LoadObjFile(@"F:\MeshedReconstruction.obj");

            m.Content = myModel3DGroup;
            vp.Children.Add(m);

            camera.UpDirection.Normalize();
            this.MouseMove           += Viewport3D_MouseMove;
            this.MouseLeftButtonDown += Viewport3D_MouseLeftButtonDown;
            this.MouseWheel          += Viewport3D_MouseWheel;
            this.KeyDown             += Window_KeyDown;
        }
Example 10
        public SSMesh_wfOBJ(SSAssetManager.Context ctx, string filename)
        {
            this.srcFilename = ctx.fullResourcePath(filename);

            Console.WriteLine("SSMesh_wfOBJ: loading wff {0}",filename);
            WavefrontObjLoader wff_data = new WavefrontObjLoader(ctx, filename);

            Console.WriteLine("wff vertex count = {0}",wff_data.positions.Count);
            Console.WriteLine("wff face count = {0}",wff_data.numFaces);

            _loadData(ctx, wff_data);

            // update radius
            float maxRadSq = 0f;
            foreach (var subset in geometrySubsets) {
                foreach (var vtx in subset.triangleMesh.lastAssignedVertices) {
                    maxRadSq = Math.Max (maxRadSq, vtx.Position.LengthSquared);
                }
            }
            _boundingSphereRadius = (float)Math.Sqrt (maxRadSq);
        }
Example 11
        public void loadModel(string modelPath)
        {
            WavefrontObjLoader rawModel = new WavefrontObjLoader("Assets/Models/spaceship.obj", delegate(string resource_name) {
                String ext = Path.GetExtension(resource_name);
                if (ext == ".obj")
                {
                    return(System.IO.File.Open(resource_name, System.IO.FileMode.Open));
                }
                if (ext == ".mtl")
                {
                    return(System.IO.File.Open("Assets/Materials/" + resource_name, System.IO.FileMode.Open));
                }
                return(null);
            });

            foreach (var mat in rawModel.materials)
            {
                foreach (var face in mat.faces)
                {
                }
            }
        }
Example 12
        public MainWindow()
        {
            InitializeComponent();



            wfl = new WavefrontObjLoader();
            slider1.ValueChanged += new RoutedPropertyChangedEventHandler <double>(slider1_ValueChanged);
            slider2.ValueChanged += new RoutedPropertyChangedEventHandler <double>(slider2_ValueChanged);
            slider3.ValueChanged += new RoutedPropertyChangedEventHandler <double>(slider3_ValueChanged);
            slider4.ValueChanged += new RoutedPropertyChangedEventHandler <double>(slider4_ValueChanged);
            slider5.ValueChanged += new RoutedPropertyChangedEventHandler <double>(slider5_ValueChanged);

            createCamera2(); // three orthographic views
            createCamera3();
            createCamera4();
            createCamera();
            createLight();
            createModel3D();
            create360();
            createx360();
            //createAnimation();
        }
Example 13
        public SSMesh_wfOBJ(string path)
        {
            this.srcFilename = path;

            Console.WriteLine("SSMesh_wfOBJ: loading wff {0}", path);
            WavefrontObjLoader wff_data = new WavefrontObjLoader(path);

            Console.WriteLine("wff vertex count = {0}", wff_data.positions.Count);
            Console.WriteLine("wff face count = {0}", wff_data.numFaces);

            _loadData(Path.GetDirectoryName(path), wff_data);

            // update radius
            float maxRadSq = 0f;

            foreach (var subset in geometrySubsets)
            {
                foreach (var vtx in subset.triangleMesh.lastAssignedVertices)
                {
                    maxRadSq = Math.Max(maxRadSq, vtx.Position.LengthSquared);
                }
            }
            _boundingSphereRadius = (float)Math.Sqrt(maxRadSq);
        }
        // generateDrawIndexBuffer(..)
        //
        // Walks the wavefront faces, feeds pre-configured vertices to the VertexSoup,
        // and returns a new index buffer pointing into the VertexSoup.verticies list.

        public static void generateDrawIndexBuffer(
            WavefrontObjLoader wff,
            out UInt16[] indicies_return,
            out SSVertex_PosNormDiffTex1[] verticies_return)
        {
            const bool shouldDedup = true;             // this lets us turn on/off vertex-soup deduping

            var           soup          = new VertexSoup <SSVertex_PosNormDiffTex1>(deDup: shouldDedup);
            List <UInt16> draw_indicies = new List <UInt16>();

            // (0) go through the materials and faces, DENORMALIZE from WF-OBJ into fully-configured vertices

            // load indexes
            foreach (var mtl in wff.materials)
            {
                // wavefrontOBJ stores color in CIE-XYZ color space. Convert this to Alpha-RGB
                var materialDiffuseColor = WavefrontObjLoader.CIEXYZtoColor(mtl.vDiffuse).ToArgb();

                foreach (var face in mtl.faces)
                {
                    // iterate over the vertices of a wavefront FACE...

                    // DEREFERENCE each .obj vertex parameter (position, normal, texture coordinate)
                    SSVertex_PosNormDiffTex1[] vertex_list = new SSVertex_PosNormDiffTex1[face.v_idx.Length];
                    for (int facevertex = 0; facevertex < face.v_idx.Length; facevertex++)
                    {
                        // position
                        vertex_list[facevertex].Position = CV(wff.positions[face.v_idx[facevertex]]);

                        // normal
                        int normal_idx = face.n_idx[facevertex];
                        if (normal_idx != -1)
                        {
                            vertex_list[facevertex].Normal = CV(wff.normals[normal_idx]);
                        }

                        // texture coordinate
                        int tex_index = face.tex_idx[facevertex];
                        if (tex_index != -1)
                        {
                            vertex_list[facevertex].Tu = wff.texCoords[tex_index].U;
                            vertex_list[facevertex].Tv = 1 - wff.texCoords[tex_index].V;
                        }

                        // assign our material's diffusecolor to the vertex diffuse color...
                        vertex_list [facevertex].DiffuseColor = materialDiffuseColor;
                    }

                    // turn them into indices in the vertex soup..
                    //   .. we hand the soup a set of fully configured vertices. It
                    //   .. dedups and accumulates them, and hands us back indices
                    //   .. relative to its growing list of deduped vertices.
                    UInt16[] soup_indicies = soup.digestVerticies(vertex_list);

                    // now we add these indices to the draw list. Right now we assume
                    // draw is using GL_TRIANGLES, so we convert n-gons into triangle lists
                    if (soup_indicies.Length == 3)                       // triangle
                    {
                        draw_indicies.Add(soup_indicies[0]);
                        draw_indicies.Add(soup_indicies[1]);
                        draw_indicies.Add(soup_indicies[2]);
                    }
                    else if (soup_indicies.Length == 4)                         // quad
                    {
                        draw_indicies.Add(soup_indicies[0]);
                        draw_indicies.Add(soup_indicies[1]);
                        draw_indicies.Add(soup_indicies[2]);

                        draw_indicies.Add(soup_indicies[0]);
                        draw_indicies.Add(soup_indicies[2]);
                        draw_indicies.Add(soup_indicies[3]);
                    }
                    else
                    {
                        // This n-gon algorithm only works if the n-gon is coplanar and convex,
                        // which Wavefront OBJ says they must be.
                        //  .. to tessellate concave n-gons, one must use a more complex method, see
                        //    http://en.wikipedia.org/wiki/Polygon_triangulation#Ear_clipping_method

                        // manually generate a triangle-fan
                        for (int x = 1; x < (soup_indicies.Length - 1); x++)
                        {
                            draw_indicies.Add(soup_indicies[0]);
                            draw_indicies.Add(soup_indicies[x]);
                            draw_indicies.Add(soup_indicies[x + 1]);
                        }
                        // throw new NotImplementedException("unhandled face size: " + newindicies.Length);
                    }
                }
            }

            // convert the index and vertex lists into arrays and return
            indicies_return  = draw_indicies.ToArray();
            verticies_return = soup.verticies.ToArray();

            Console.WriteLine("VertexSoup_VertexFormatBinder:generateDrawIndexBuffer : \r\n   {0} verticies, {1} indicies.  Dedup = {2}",
                              verticies_return.Length, indicies_return.Length,
                              shouldDedup ? "YES" : "NO");
        }
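The explicit triangle and quad branches above emit the same indices that the general triangle-fan fallback would, and the fan covers any convex, coplanar n-gon. A minimal sketch of that index-only step, assuming the face's deduplicated vertex-soup indices arrive in winding order (the method name is hypothetical):

        // Fan-triangulate one convex, coplanar polygon given its (already deduplicated)
        // vertex-soup indices in winding order: emits (0,1,2), (0,2,3), ...
        static System.Collections.Generic.List<ushort> FanTriangulate(ushort[] faceIndices)
        {
            var tris = new System.Collections.Generic.List<ushort>();
            for (int x = 1; x < faceIndices.Length - 1; x++) {
                tris.Add(faceIndices[0]);
                tris.Add(faceIndices[x]);
                tris.Add(faceIndices[x + 1]);
            }
            return tris;
        }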
Example 15
        public static Mesh LoadMesh(Device device, string path)
        {
            var loader = new WavefrontObjLoader();

            loader.LoadObj(path);

            //if (positionTransform.HasValue)
            //{
            //    Matrix3 transform = positionTransform.Value;
            //    for (int j = 0; j < loader.VertexList.Count; j++)
            //    {
            //        loader.VertexList[j].Vector = Matrix3.Transform(loader.VertexList[j].Vector, transform);
            //    }
            //}

            // vertex buffer
            var vertexCount = loader.TriangleCount * 3;
            var vertices    = new ColoredTexturedVertex[vertexCount];

            int i = 0;

            foreach (var face in loader.FaceList)
            {
                var v = face.VertexIndexList;
                var n = face.NormalVertexIndexList;
                var t = face.TextureVertexIndexList;

                vertices[i + 0] = new ColoredTexturedVertex {
                    Position = loader.VertexList[v[0] - 1].Vector, Normal = loader.NormalList[n[0] - 1].Vector, Color = new Vector4(1, 1, 1, 1), Texture = loader.TextureList[t[0] - 1].Vector
                };
                vertices[i + 1] = new ColoredTexturedVertex {
                    Position = loader.VertexList[v[1] - 1].Vector, Normal = loader.NormalList[n[1] - 1].Vector, Color = new Vector4(1, 1, 1, 1), Texture = loader.TextureList[t[1] - 1].Vector
                };
                vertices[i + 2] = new ColoredTexturedVertex {
                    Position = loader.VertexList[v[2] - 1].Vector, Normal = loader.NormalList[n[2] - 1].Vector, Color = new Vector4(1, 1, 1, 1), Texture = loader.TextureList[t[2] - 1].Vector
                };

                i += 3;

                if (v.Length == 4)
                {
                    vertices[i + 0] = new ColoredTexturedVertex {
                        Position = loader.VertexList[v[2] - 1].Vector, Normal = loader.NormalList[n[2] - 1].Vector, Color = new Vector4(1, 1, 1, 1), Texture = loader.TextureList[t[2] - 1].Vector
                    };
                    vertices[i + 1] = new ColoredTexturedVertex {
                        Position = loader.VertexList[v[3] - 1].Vector, Normal = loader.NormalList[n[3] - 1].Vector, Color = new Vector4(1, 1, 1, 1), Texture = loader.TextureList[t[3] - 1].Vector
                    };
                    vertices[i + 2] = new ColoredTexturedVertex {
                        Position = loader.VertexList[v[0] - 1].Vector, Normal = loader.NormalList[n[0] - 1].Vector, Color = new Vector4(1, 1, 1, 1), Texture = loader.TextureList[t[0] - 1].Vector
                    };

                    i += 3;
                }
            }

            var indices = new int[vertexCount];

            for (int j = 0; j < vertexCount; j++)
            {
                indices[j] = j;
            }


            Mesh mesh = new Mesh(device);

            mesh.VertexBuffer = Buffer11.Create <ColoredTexturedVertex>(device.NativeDevice, BindFlags.VertexBuffer, vertices.ToArray());
            mesh.IndexBuffer  = Buffer11.Create(device.NativeDevice, BindFlags.IndexBuffer, indices.ToArray());
            mesh.VertexSize   = Utilities.SizeOf <ColoredTexturedVertex>();

            mesh.IndexCount = indices.Count();

            return(mesh);
        }
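LoadMesh above converts OBJ's 1-based face indices to 0-based array lookups and splits quads into the two triangles (0,1,2) and (2,3,0). A minimal sketch of just that index arithmetic, with a hypothetical helper name:

        // Hypothetical helper: convert a 1-based OBJ face (triangle or quad) into
        // 0-based triangle indices, splitting a quad as (0,1,2) + (2,3,0).
        static int[] ToTriangleIndices(int[] objFaceIndices)
        {
            if (objFaceIndices.Length == 3)
            {
                return new[] { objFaceIndices[0] - 1, objFaceIndices[1] - 1, objFaceIndices[2] - 1 };
            }
            if (objFaceIndices.Length == 4)
            {
                return new[] {
                    objFaceIndices[0] - 1, objFaceIndices[1] - 1, objFaceIndices[2] - 1,
                    objFaceIndices[2] - 1, objFaceIndices[3] - 1, objFaceIndices[0] - 1
                };
            }
            throw new System.NotSupportedException("only triangles and quads are handled, as in the example above");
        }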
Example 16
        public MainWindow()
        {
            InitializeComponent();

            model.change  += new MainViewModel.OnSelectChange(Model_change);
            model.change1 += new MainViewModel.OnSelectChange(Model_change1);
            //right.DataContext = ModelPartList2;
            this.Closed     += MainWindow_Closed;
            this.DataContext = model;

            // camera
            myPCamera               = new PerspectiveCamera();
            myPCamera.Position      = new Point3D(0, 0, 200);
            myPCamera.LookDirection = new Vector3D(0, 0, -1);
            myPCamera.FieldOfView   = 1000;
            vp.Camera               = myPCamera;

            Model3DGroup myModel3DGroup = new Model3DGroup();
            //light sources:
            //AmbientLight (ambient light)
            //DirectionalLight (directional light)
            //PointLight (point light)
            //SpotLight (spot light)
            AmbientLight myDirectionalLight = new AmbientLight();

            myDirectionalLight.Color = Colors.White;
            // myDirectionalLight.Direction = new Vector3D(0.61, 0.5, 0.61);
            myModel3DGroup.Children.Add(myDirectionalLight);

            //DirectionalLight myDirectionalLight2 = new DirectionalLight();
            //myDirectionalLight2.Color = Colors.White;
            //myDirectionalLight2.Direction = new Vector3D(0.61, 0.5, 0.61);
            //myModel3DGroup.Children.Add(myDirectionalLight2);



            // create a new loader object
            WavefrontObjLoader wfl = new WavefrontObjLoader();
            // ModelVisual3DWithName is a class defined by WavefrontObjLoader that inherits from ModelVisual3D; using ModelVisual3D directly also works
            // import an obj; name the first model m
            //ModelVisual3DWithName m = wfl.LoadObjFile(@"C:\Users\36102\Desktop\rdk_material_scene.obj");
            //m.Content = myModel3DGroup;
            //// import an obj; name the second model n
            //var n = wfl.LoadObjFile(@"C:\Users\hasee\Desktop\WpfApplication2\WpfApplication2\精细人体.obj");
            //n.Content = myModel3DGroup;

            // the following adjusts n's position; beginners can comment it out at first.
            var tt = new TranslateTransform3D();

            tt.OffsetX = 110;
            tt.OffsetZ = -50;
            tt.OffsetY = -100;
            var tr = new RotateTransform3D();

            tr.Rotation = new AxisAngleRotation3D(new Vector3D(1, 0, 0), 90);

            var tr2 = new RotateTransform3D();

            tr2.Rotation = new AxisAngleRotation3D(new Vector3D(0, 0, 1), -45);

            var ts = new ScaleTransform3D();

            ts.ScaleX = 1.5;
            ts.ScaleY = 1.5;
            ts.ScaleZ = 1.6;
            var tg = new Transform3DGroup();

            // tg.Children.Add(tr);// tg.Children.Add(tr2); tg.Children.Add(tt); tg.Children.Add(ts);
            //n.Transform = tg;
            // add the two models to the scene
            //vp.Children.Add(m);
            //   vp.Children.Add(n);
            // add mouse events used to show/hide the glow effect
            vp.MouseEnter          += Vp_MouseEnter;
            vp.MouseLeave          += Vp_MouseLeave;
            vp.MouseWheel          += VP_MouseWheel;
            vp.MouseMove           += Window_MouseMove;
            vp.MouseLeftButtonDown += vp_MouseLeftButtonDown;
        }
Example 17
        public static VertexBufferBinding Load(GraphicsDevice device, String filename, Matrix3?positionTransform = null)
        {
            var library = Injector.Global.Resolve <Library>();
            var loader  = new WavefrontObjLoader();

            using (var stream = library.OpenRead(filename))
            {
                loader.LoadObj(stream);
            }

            if (positionTransform.HasValue)
            {
                Matrix3 transform = positionTransform.Value;
                for (int j = 0; j < loader.VertexList.Count; j++)
                {
                    loader.VertexList[j].Vector = Matrix3.Transform(loader.VertexList[j].Vector, transform);
                }
            }

            // vertex buffer
            var vertexCount = loader.TriangleCount * 3;
            var vertices    = new PositionColorTexture[vertexCount];

            int i = 0;

            foreach (var face in loader.FaceList)
            {
                var v = face.VertexIndexList;
                var t = face.TextureVertexIndexList;

                vertices[i + 0] = new PositionColorTexture {
                    Position = loader.VertexList[v[0] - 1].Vector, Color = new Vector4(1, 1, 1, 1), Texture = loader.TextureList[t[0] - 1].Vector
                };
                vertices[i + 2] = new PositionColorTexture {
                    Position = loader.VertexList[v[1] - 1].Vector, Color = new Vector4(1, 1, 1, 1), Texture = loader.TextureList[t[1] - 1].Vector
                };
                vertices[i + 1] = new PositionColorTexture {
                    Position = loader.VertexList[v[2] - 1].Vector, Color = new Vector4(1, 1, 1, 1), Texture = loader.TextureList[t[2] - 1].Vector
                };

                i += 3;

                if (v.Length == 4)
                {
                    vertices[i + 0] = new PositionColorTexture {
                        Position = loader.VertexList[v[2] - 1].Vector, Color = new Vector4(1, 1, 1, 1), Texture = loader.TextureList[t[2] - 1].Vector
                    };
                    vertices[i + 2] = new PositionColorTexture {
                        Position = loader.VertexList[v[3] - 1].Vector, Color = new Vector4(1, 1, 1, 1), Texture = loader.TextureList[t[3] - 1].Vector
                    };
                    vertices[i + 1] = new PositionColorTexture {
                        Position = loader.VertexList[v[0] - 1].Vector, Color = new Vector4(1, 1, 1, 1), Texture = loader.TextureList[t[0] - 1].Vector
                    };

                    i += 3;
                }
            }

            var buffer = GraphicsBuffer.Create(device, vertices, false);

            return(new VertexBufferBinding
            {
                Buffer = buffer,
                Count = vertexCount,
                Declaration = PositionColorTexture.Layout,
                Stride = PositionColorTexture.Layout.Stride,
            });
        }
        // convert WavefrontObjLoader vector formats to our OpenTK Vector3 format
        // generateDrawIndexBuffer(..)
        //
        // Walks the wavefront faces, feeds pre-configured vertices to the VertexSoup,
        // and returns a new index buffer pointing into the VertexSoup.verticies list.
        public static void generateDrawIndexBuffer(
			WavefrontObjLoader wff, 
            WavefrontObjLoader.MaterialInfoWithFaces objMatSubset,
			out UInt16[] indicies_return, 
			out SSVertex_PosNormTex[] verticies_return)
        {
            const bool shouldDedup = true; // this lets us turn on/off vertex-soup deduping

            var soup = new VertexSoup<SSVertex_PosNormTex>(deDup:shouldDedup);
            List<UInt16> draw_indicies = new List<UInt16>();

            // (0) go through the materials and faces, DENORMALIZE from WF-OBJ into fully-configured vertices

            // load indexes
            var m = objMatSubset;

            // wavefrontOBJ stores color in CIE-XYZ color space. Convert this to Alpha-RGB
            var materialDiffuseColor = WavefrontObjLoader.CIEXYZtoColor(m.mtl.vDiffuse).ToArgb();

            foreach (var face in m.faces) {

                // iterate over the vertices of a wavefront FACE...

                // DEREFERENCE each .obj vertex parameter (position, normal, texture coordinate)
                SSVertex_PosNormTex[] vertex_list = new SSVertex_PosNormTex[face.v_idx.Length];
                for (int facevertex = 0; facevertex < face.v_idx.Length; facevertex++) {

                    // position
                    vertex_list[facevertex].Position = wff.positions[face.v_idx[facevertex]].Xyz;

                    // normal
                    int normal_idx = face.n_idx[facevertex];
                    if (normal_idx != -1) {
                        vertex_list[facevertex].Normal = wff.normals[normal_idx];
                    }

                    // texture coordinate
                    int tex_index = face.tex_idx[facevertex];
                    if (tex_index != -1 ) {
                        vertex_list[facevertex].Tu = wff.texCoords[tex_index].X;
                        vertex_list[facevertex].Tv = 1- wff.texCoords[tex_index].Y;
                    }
                }

                // turn them into indices in the vertex soup..
                //   .. we hand the soup a set of fully configured vertices. It
                //   .. dedups and accumulates them, and hands us back indices
                //   .. relative to its growing list of deduped vertices.
                UInt16[] soup_indicies = soup.digestVerticies(vertex_list);

                // now we add these indices to the draw list. Right now we assume
                // draw is using GL_TRIANGLES, so we convert n-gons into triangle lists
                if (soup_indicies.Length == 3) { // triangle
                    draw_indicies.Add(soup_indicies[0]);
                    draw_indicies.Add(soup_indicies[1]);
                    draw_indicies.Add(soup_indicies[2]);
                } else if (soup_indicies.Length == 4) { // quad
                    draw_indicies.Add(soup_indicies[0]);
                    draw_indicies.Add(soup_indicies[1]);
                    draw_indicies.Add(soup_indicies[2]);

                    draw_indicies.Add(soup_indicies[0]);
                    draw_indicies.Add(soup_indicies[2]);
                    draw_indicies.Add(soup_indicies[3]);
                } else {
                    // This n-gon algorithm only works if the n-gon is coplanar and convex,
                    // which Wavefront OBJ says they must be.
                    //  .. to tessellate concave n-gons, one must use a more complex method, see
                    //    http://en.wikipedia.org/wiki/Polygon_triangulation#Ear_clipping_method

                    // manually generate a triangle-fan
                    for (int x = 1; x < (soup_indicies.Length-1); x++) {
                        draw_indicies.Add(soup_indicies[0]);
                        draw_indicies.Add(soup_indicies[x]);
                        draw_indicies.Add(soup_indicies[x+1]);
                    }
                    // throw new NotImplementedException("unhandled face size: " + newindicies.Length);
                }
            }

            // convert the index and vertex lists into arrays and return
            indicies_return = draw_indicies.ToArray();
            verticies_return = soup.verticies.ToArray();

            Console.WriteLine ("VertexSoup_VertexFormatBinder:generateDrawIndexBuffer : \r\n   {0} verticies, {1} indicies.  Dedup = {2}",
                              verticies_return.Length, indicies_return.Length,
                              shouldDedup ? "YES" : "NO");
        }
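Both generateDrawIndexBuffer variants write Tv = 1 - V when copying texture coordinates, which is commonly done because texture images are uploaded with their first row at the top while OBJ treats V = 0 as the bottom of the image. A minimal sketch of the flip in isolation, assuming OpenTK's Vector2 (the helper name is hypothetical):

        // Flip the V coordinate of an OBJ texture coordinate for a top-left-origin image convention.
        static OpenTK.Vector2 FlipV(OpenTK.Vector2 objTexCoord)
        {
            return new OpenTK.Vector2(objTexCoord.X, 1f - objTexCoord.Y);
        }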
Example 19
 private void _loadData(SSAssetManager.Context ctx, WavefrontObjLoader m)
 {
     foreach (var srcmat in m.materials) {
         if (srcmat.faces.Count != 0) {
             this.geometrySubsets.Add(_loadMaterialSubset(ctx, m, srcmat));
         }
     }
 }
Example 20
        private SSMeshOBJSubsetData _loadMaterialSubset(SSAssetManager.Context ctx, WavefrontObjLoader wff, 
														WavefrontObjLoader.MaterialInfoWithFaces objMatSubset)
        {
            // generate renderable geometry data...
            SSVertex_PosNormTexDiff[] vertices;
            UInt16[] triIndices, wireframeIndices;
            VertexSoup_VertexFormatBinder.generateDrawIndexBuffer(
                wff, out triIndices, out vertices);
            wireframeIndices = OpenTKHelper.generateLineIndicies (triIndices);
            SSMeshOBJSubsetData subsetData = new SSMeshOBJSubsetData(
                vertices, triIndices, wireframeIndices);

            // setup the material...
            // load and link every texture present
            subsetData.TextureMaterial = new SSTextureMaterial();
            if (objMatSubset.mtl.diffuseTextureResourceName != null) {
                subsetData.TextureMaterial.diffuseTex = SSAssetManager.GetInstance<SSTexture>(ctx, objMatSubset.mtl.diffuseTextureResourceName);
            }
            if (objMatSubset.mtl.ambientTextureResourceName != null) {
                subsetData.TextureMaterial.ambientTex = SSAssetManager.GetInstance<SSTexture>(ctx, objMatSubset.mtl.ambientTextureResourceName);
            }
            if (objMatSubset.mtl.bumpTextureResourceName != null) {
                subsetData.TextureMaterial.bumpMapTex = SSAssetManager.GetInstance<SSTexture>(ctx, objMatSubset.mtl.bumpTextureResourceName);
            }
            if (objMatSubset.mtl.specularTextureResourceName != null) {
                subsetData.TextureMaterial.specularTex = SSAssetManager.GetInstance<SSTexture>(ctx, objMatSubset.mtl.specularTextureResourceName);
            }
            return subsetData;
        }
Example 21
        public MainWindow()
        {
            InitializeComponent();

            // camera
            myPCamera               = new PerspectiveCamera();
            myPCamera.Position      = new Point3D(0, 0, 200);
            myPCamera.LookDirection = new Vector3D(0, 0, -1);
            myPCamera.FieldOfView   = 1000;
            vp.Camera               = myPCamera;

            Model3DGroup myModel3DGroup = new Model3DGroup();
            //light sources:
            //AmbientLight (ambient light)
            //DirectionalLight (directional light)
            //PointLight (point light)
            //SpotLight (spot light)
            DirectionalLight myDirectionalLight = new DirectionalLight();

            myDirectionalLight.Color     = Colors.White;
            myDirectionalLight.Direction = new Vector3D(197.862232727069, 17.7047755886957, -11.8470804909344);
            myModel3DGroup.Children.Add(myDirectionalLight);

            //DirectionalLight myDirectionalLight2 = new DirectionalLight();
            //myDirectionalLight2.Color = Colors.White;
            //myDirectionalLight2.Direction = new Vector3D(0.61, 0.5, 0.61);
            //myModel3DGroup.Children.Add(myDirectionalLight2);



            // create a new loader object
            WavefrontObjLoader wfl = new WavefrontObjLoader();
            // ModelVisual3DWithName is a class defined by WavefrontObjLoader that inherits from ModelVisual3D; using ModelVisual3D directly also works
            // import an obj; name the first model m
            //ModelVisual3DWithName m = wfl.LoadObjFile(@"C:\Users\renzeming\Downloads\WpfApplication2\Lancer_Evolution_10.obj");
            //m.Content = myModel3DGroup;
            // import an obj; name the second model n
            var n = wfl.LoadObjFile(@"E:\Mine\Items\Demos\WebApplication1\Example_WPF_donghua\texttrue\low.obj");

            n.Content = myModel3DGroup;

            // the following adjusts n's position; beginners can comment it out at first.
            var tt = new TranslateTransform3D();
            //tt.OffsetX = 110;
            //tt.OffsetZ = -50;
            //tt.OffsetY = -100;
            var tr = new RotateTransform3D();

            tr.Rotation = new AxisAngleRotation3D(new Vector3D(1, 0, 0), 90);

            var tr2 = new RotateTransform3D();

            tr2.Rotation = new AxisAngleRotation3D(new Vector3D(0, 0, 1), 90);

            var ts = new ScaleTransform3D();

            ts.ScaleX = 1.5;
            ts.ScaleY = 1.5;
            ts.ScaleZ = 1.6;
            var tg = new Transform3DGroup();

            tg.Children.Add(tr); tg.Children.Add(tr2);
            tg.Children.Add(tt); tg.Children.Add(ts);
            n.Transform = tg;

            // add the two models to the scene
            //vp.Children.Add(m);
            //197.862232727069,17.7047755886957,-11.8470804909344



            vp.Children.Add(n);

        }