        private void CreateScene()
        {
            // IMPORTANT:
            // Before the Form is closed, we need to dispose all the DXEngine objects that we created (all that implement IDisposable).
            // This means that all materials, Mesh objects and SceneNodes need to be disposed.
            // To make this easier, we can use the DisposeList collection that will hold IDisposable objects.
            _disposables = new DisposeList();
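
            // A minimal sketch of that cleanup, assuming an Unloaded (or Form Closed) event handler exists elsewhere in this sample:
            //
            //   private void OnUnloaded(object sender, RoutedEventArgs e)
            //   {
            //       _disposables.Dispose();       // disposes all IDisposable objects that were added to the DisposeList
            //       MainDXViewportView.Dispose(); // also dispose the DXViewportView and its DirectX resources
            //   }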


            //
            // 1)
            //
            // The easiest way to add 3D models to DXEngine's scene is to add WPF's Visual3D objects to Viewport3D.Children collection:

            var pyramidVisual3D = new Ab3d.Visuals.PyramidVisual3D()
            {
                BottomCenterPosition = new Point3D(-100, 0, 0),
                Size     = new Size3D(80, 50, 80),
                Material = new DiffuseMaterial(Brushes.Blue)
            };

            pyramidVisual3D.SetName("PyramidVisual3D");

            MainViewport.Children.Add(pyramidVisual3D);


            // We could also start from PyramidMesh3D and then create GeometryModel3D and ModelVisual3D
            //var pyramidMeshGeometry3D = new Ab3d.Meshes.PyramidMesh3D(new Point3D(100, 0, 0), new Size3D(80, 50, 80)).Geometry;

            //if (pyramidMeshGeometry3D.Normals.Count == 0)
            //    pyramidMeshGeometry3D.Normals = Ab3d.Utilities.MeshUtils.CalculateNormals(pyramidMeshGeometry3D);

            //var geometryModel3D = new GeometryModel3D(pyramidMeshGeometry3D, diffuseMaterial);
            //var modelVisual3D = new ModelVisual3D()
            //{
            //    Content = geometryModel3D
            //};

            //MainViewport.Children.Add(modelVisual3D);



            // DXEngine internally converts WPF objects into SceneNodes.
            // You can get a string that describes the SceneNodes by opening the Visual Studio Immediate Window and executing the following:
            // MainDXViewportView.DXScene.DumpSceneNodes();
            //
            // Usually this is the best way to define the 3D scene.
            //
            // But if you have very complex objects with a lot of positions, it might be good to create the SceneNodes manually.
            // This allows faster initialization because WPF 3D objects are not created.
            // Also all the memory used by WPF 3D objects can be freed.
            //
            // Because WPF uses the double type for Point3D and Vector3D instead of float as used by DirectX and DXEngine,
            // the memory required for 3D objects in WPF is almost twice the size of what is required in DXEngine.
            //
            // For example, if your object has 100.000 positions, the memory requirements are the following:
            //
            // In WPF:
            // Positions:           100.000 * 3 (x,y,z) * 8 (8 bytes for one double value) = 2.400.000 bytes
            // Normals:             100.000 * 3 (x,y,z) * 8 (8 bytes for one double value) = 2.400.000 bytes
            // Texture coordinates: 100.000 * 2 (u,v) * 8 (8 bytes for one double value)   = 1.600.000 bytes
            // Triangle indices:    100.000 * 4 (4 bytes for one Int32)                    =   400.000 bytes (the actual number of triangle indices may be different - depends on how many positions are shared between triangles)
            // TOTAL:                                                                      = 6.800.000 bytes = 6.5 MB
            //
            // In DXEngine:
            // Positions:           100.000 * 3 (x,y,z) * 4 (4 bytes for one float value) = 1.200.000 bytes
            // Normals:             100.000 * 3 (x,y,z) * 4 (4 bytes for one float value) = 1.200.000 bytes
            // Texture coordinates: 100.000 * 2 (u,v) * 4 (4 bytes for one float value)   =   800.000 bytes
            // Triangle indices:    100.000 * 4 (4 bytes for one Int32)                   =   400.000 bytes
            // TOTAL:                                                                     = 3.600.000 bytes = 3.4 MB
            //
            // Usually both the WPF and the DXEngine objects need to be initialized (this takes CPU time) and both are stored in memory.
            //
            //
            // When the DXEngine's SceneNodes are created manually, the WPF objects can be cleared from memory,
            // or the SceneNodes can even be created without the intermediate WPF objects.
            //
            // Once a SceneNode is created, it can be added to the scene by using a SceneNodeVisual3D.
            // This is a Visual3D and can be added to the Viewport3D.Children collection.
            // The SceneNodeVisual3D also provides a way to add a Transform to the SceneNode.
            //
            // A disadvantage of creating SceneNodes is that such objects cannot be shown when WPF 3D rendering is used (for example, when DXEngine falls back to WPF 3D rendering because of problems with DirectX initialization).
            // Another disadvantage is that it is more complicated to create and modify SceneNodes.
            //
            // Usually, when memory usage is not problematic, it is better to use standard WPF 3D objects.

            //
            // 2)
            //
            // Create MeshObjectNode from GeometryMesh by providing arrays (IList<T>) for positions, normals, textureCoordinates and triangleIndices:

            Vector3[] positions;
            Vector3[] normals;
            Vector2[] textureCoordinates;
            int[]     triangleIndices;

            // Get Pyramid mesh data
            GetObjectDataArrays(out positions, out normals, out textureCoordinates, out triangleIndices);


            // The easiest way to create DXEngine's material is to use Ab3d.DirectX.Materials.WpfMaterial that takes a WPF material and converts it into DXEngine's material
            var diffuseMaterial = new DiffuseMaterial(Brushes.Green);
            var dxMaterial      = new Ab3d.DirectX.Materials.WpfMaterial(diffuseMaterial);

            _disposables.Add(dxMaterial);

            // Create SceneNode
            // First create GeometryMesh object from the mesh arrays
            var geometryMesh = new Ab3d.DirectX.GeometryMesh(positions, normals, textureCoordinates, triangleIndices, "PyramidMesh3D");

            _disposables.Add(geometryMesh);

            // NOTE:
            // We could also create the GeometryMesh from WPF's MeshGeometry3D with the help of DXMeshGeometry3D:
            //var wpfPyramidMesh = new Meshes.PyramidMesh3D(bottomCenterPosition: new System.Windows.Media.Media3D.Point3D(0, 0, 0),
            //                                              size: new System.Windows.Media.Media3D.Size3D(30, 20, 10));

            //var geometryMesh = new Ab3d.DirectX.Models.DXMeshGeometry3D(wpfPyramidMesh.Geometry, "PyramidMesh");


            // Use GeometryMesh to create MeshObjectNode (SceneNode from GeometryMesh object)
            var meshObjectNode = new Ab3d.DirectX.MeshObjectNode(geometryMesh, dxMaterial);

            meshObjectNode.Name = "Green-MeshObjectNode-from-GeometryMesh";

            _disposables.Add(meshObjectNode);

            // Use SceneNodeVisual3D to show SceneNode in DXViewportView
            var sceneNodeVisual3D = new SceneNodeVisual3D(meshObjectNode);

            //sceneNodeVisual3D.Transform = new TranslateTransform3D(0, 0, 0);

            MainViewport.Children.Add(sceneNodeVisual3D);


            //
            // 3)
            //
            // Create MeshObjectNode from SimpleMesh<T> by providing a VertexBufferArray and an IndexBufferArray:
            // This option provides faster initialization, because the VertexBufferArray is already generated and can be directly used to create the DirectX vertex buffer.
            // In the previous sample the VertexBufferArray was generated by the GeometryMesh from the positions, normals and textureCoordinates arrays.
            //
            // If you can store your 3D models on disk (or in some other location) in the form of a VertexBuffer and an IndexBuffer,
            // then this is the fastest way to initialize 3D objects.
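            //
            // A hypothetical sketch of saving such buffers to a file (assuming the PositionNormalTexture
            // fields are Position, Normal and TextureCoordinate; reading them back would mirror this with a BinaryReader):
            //
            //   using (var writer = new System.IO.BinaryWriter(System.IO.File.Create("pyramid.mesh")))
            //   {
            //       writer.Write(vertexBuffer.Length);
            //       foreach (var vertex in vertexBuffer)
            //       {
            //           writer.Write(vertex.Position.X); writer.Write(vertex.Position.Y); writer.Write(vertex.Position.Z);
            //           writer.Write(vertex.Normal.X);   writer.Write(vertex.Normal.Y);   writer.Write(vertex.Normal.Z);
            //           writer.Write(vertex.TextureCoordinate.X); writer.Write(vertex.TextureCoordinate.Y);
            //       }
            //
            //       writer.Write(indexBuffer.Length);
            //       foreach (var index in indexBuffer)
            //           writer.Write(index);
            //   }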

            //
            // 3a)
            //
            // The standard way to create a SimpleMesh is to use the PositionNormalTexture or some other struct that defines the data for one vertex:

            PositionNormalTexture[] vertexBuffer;
            int[] indexBuffer;
            GetVertexAndIndexBuffer(out vertexBuffer, out indexBuffer);
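
            // A hypothetical sketch of how one element of such a vertex buffer could be composed from the
            // arrays used in section 2 (assuming a PositionNormalTexture(position, normal, textureCoordinate) constructor):
            //
            //   vertexBuffer[i] = new PositionNormalTexture(positions[i], normals[i], textureCoordinates[i]);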

            var simpleMesh = new SimpleMesh <PositionNormalTexture>(vertexBuffer,
                                                                    indexBuffer,
                                                                    inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate,
                                                                    name: "SimpleMesh-from-PositionNormalTexture-array");

            _disposables.Add(simpleMesh);

            diffuseMaterial = new DiffuseMaterial(Brushes.Red);
            dxMaterial      = new Ab3d.DirectX.Materials.WpfMaterial(diffuseMaterial);

            _disposables.Add(dxMaterial);

            _redPyramidObjectNode      = new Ab3d.DirectX.MeshObjectNode(simpleMesh, dxMaterial);
            _redPyramidObjectNode.Name = "Red-MeshObjectNode-from-SimpleMesh";

            _disposables.Add(_redPyramidObjectNode);

            sceneNodeVisual3D           = new SceneNodeVisual3D(_redPyramidObjectNode);
            sceneNodeVisual3D.Transform = new TranslateTransform3D(100, 0, 0);

            MainViewport.Children.Add(sceneNodeVisual3D);



            //
            // 3b)
            //
            // It is also possible to create a SimpleMesh from a base type - for example float (for example, when the data is read from a file).
            // In this case we need to set the ArrayStride property.
            //
            // A drawback of using a non-standard vertex buffer (one that is not Vector3, PositionNormalTexture, PositionNormal or PositionTexture)
            // is that such a mesh does not support hit testing.
            // In this sample this is demonstrated by the camera rotation around the object hit by the mouse - it is not possible to rotate around the SimpleMesh<float> object.

            float[] floatVertexBuffer;
            GetFloatVertexAndIndexBuffer(out floatVertexBuffer, out indexBuffer);

            var floatSimpleMesh = new SimpleMesh <float>(floatVertexBuffer,
                                                         indexBuffer,
                                                         inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate,
                                                         name: "SimpleMesh-from-float-array");

            _disposables.Add(floatSimpleMesh);

            // IMPORTANT:
            // When we do not use PositionNormalTexture or PositionNormal, the DXEngine cannot calculate the Bounds of the SimpleMesh for us.
            // In this case we need to calculate and specify the Bounds manually.
            // The bounds below are defined for the following mesh: new Ab3d.Meshes.PyramidMesh3D(new Point3D(0, 0, 0), new Size3D(80, 50, 80))
            floatSimpleMesh.Bounds = new Bounds(new BoundingBox(minimum: new Vector3(-40, -25, -40), maximum: new Vector3(40, 25, 40)));

            // Because we created SimpleMesh with a base type (float),
            // we need to specify how many array elements define one Vertex.
            // This is 8 in our case: 3 (position x,y,z) + 3 (normal x,y,z) + 2 (texture coordinate u,v) = 8
            floatSimpleMesh.ArrayStride = 8;


            diffuseMaterial = new DiffuseMaterial(Brushes.Orange);
            dxMaterial      = new Ab3d.DirectX.Materials.WpfMaterial(diffuseMaterial);

            _disposables.Add(dxMaterial);

            _orangePyramidObjectNode      = new Ab3d.DirectX.MeshObjectNode(floatSimpleMesh, dxMaterial);
            _orangePyramidObjectNode.Name = "Orange-MeshObjectNode-from-FloatSimpleMesh";

            _disposables.Add(_orangePyramidObjectNode);

            sceneNodeVisual3D           = new SceneNodeVisual3D(_orangePyramidObjectNode);
            sceneNodeVisual3D.Transform = new TranslateTransform3D(200, 0, 0);

            MainViewport.Children.Add(sceneNodeVisual3D);



            //
            // 3c)
            //
            // Instead of float array elements, it is also possible to use a byte array to create a SimpleMesh.
            //
            // As before, a drawback of using a non-standard vertex buffer (one that is not Vector3, PositionNormalTexture, PositionNormal or PositionTexture)
            // is that such a mesh does not support hit testing.
            // In this sample this is demonstrated by the camera rotation around the object hit by the mouse - it is not possible to rotate around the SimpleMesh<byte> object.

            byte[] byteVertexBuffer;
            GetByteVertexAndIndexBuffer(out byteVertexBuffer, out indexBuffer);
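
            // One possible way to produce such a byte array (a hypothetical alternative to GetByteVertexAndIndexBuffer)
            // is to copy the raw bytes of an existing float vertex buffer:
            //
            //   byte[] byteVertexBuffer = new byte[floatVertexBuffer.Length * sizeof(float)];
            //   Buffer.BlockCopy(floatVertexBuffer, 0, byteVertexBuffer, 0, byteVertexBuffer.Length);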

            var byteSimpleMesh = new SimpleMesh <byte>(byteVertexBuffer,
                                                       indexBuffer,
                                                       inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate,
                                                       name: "SimpleMesh-from-byte-array");

            _disposables.Add(byteSimpleMesh);

            // IMPORTANT:
            // When we do not use PositionNormalTexture or PositionNormal, the DXEngine cannot calculate the Bounds of the SimpleMesh for us.
            // In this case we need to calculate and specify the Bounds manually.
            // The bounds below are defined for the following mesh: new Ab3d.Meshes.PyramidMesh3D(new Point3D(0, 0, 0), new Size3D(80, 50, 80))
            byteSimpleMesh.Bounds = new Bounds(new BoundingBox(minimum: new Vector3(-40, -25, -40), maximum: new Vector3(40, 25, 40)));

            // Because we created SimpleMesh with a base type (byte),
            // we need to specify how many array elements define one Vertex.
            // This is 32 in our case: 8 (8x float value) * 4 (4 bytes for one float) = 32
            byteSimpleMesh.ArrayStride = 32;


            diffuseMaterial = new DiffuseMaterial(Brushes.Yellow);
            dxMaterial      = new Ab3d.DirectX.Materials.WpfMaterial(diffuseMaterial);

            _disposables.Add(dxMaterial);

            meshObjectNode      = new Ab3d.DirectX.MeshObjectNode(byteSimpleMesh, dxMaterial);
            meshObjectNode.Name = "Yellow-MeshObjectNode-from-ByteSimpleMesh";

            _disposables.Add(meshObjectNode);

            sceneNodeVisual3D           = new SceneNodeVisual3D(meshObjectNode);
            sceneNodeVisual3D.Transform = new TranslateTransform3D(300, 0, 0);

            MainViewport.Children.Add(sceneNodeVisual3D);


            //
            // 4)
            //
            // When a frozen Model3DGroup is added to the DXViewportView, it is converted into a WpfOptimizedModel3DGroupNode (derived from SceneNode).
            // In this case both the WPF and the DXEngine 3D object data are stored in memory.
            //
            // To release the WPF 3D object data, it is possible to create the WpfOptimizedModel3DGroupNode manually and
            // then clear the used WPF 3D objects.
            // This can be done by setting the AutomaticallyClearWpfObjectsAfterInitialization property on WpfOptimizedModel3DGroupNode to true,
            // or by calling the ClearWpfObjects method on WpfOptimizedModel3DGroupNode.

            string dragonModelFileName = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Resources\\Models\\dragon_vrip_res3.obj");

            var     readerObj   = new Ab3d.ReaderObj();
            Model3D readModel3D = readerObj.ReadModel3D(dragonModelFileName);

            double scale = 100 / readModel3D.Bounds.SizeX; // Scale the model so that its SizeX is 100

            readModel3D.Transform = new ScaleTransform3D(scale, scale, scale);

            var model3DGroup = readModel3D as Model3DGroup;

            if (model3DGroup == null)
            {
                model3DGroup = new Model3DGroup();
                model3DGroup.Children.Add(readModel3D);
            }

            model3DGroup.Freeze();


            var wpfOptimizedModel3DGroupNode = new Ab3d.DirectX.Models.WpfOptimizedModel3DGroupNode(model3DGroup, name: "Frozen Model3DGroup");

            wpfOptimizedModel3DGroupNode.AutomaticallyClearWpfObjectsAfterInitialization = true; // This will clear the WPF 3D models that are referenced by WpfOptimizedModel3DGroupNode when the DirectX objects are created
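            // Alternatively, we could leave AutomaticallyClearWpfObjectsAfterInitialization set to false
            // and call wpfOptimizedModel3DGroupNode.ClearWpfObjects() manually after the DirectX objects have been created.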

            _disposables.Add(wpfOptimizedModel3DGroupNode);

            sceneNodeVisual3D           = new SceneNodeVisual3D(wpfOptimizedModel3DGroupNode);
            sceneNodeVisual3D.Transform = new TranslateTransform3D(-100, -20, -100);

            MainViewport.Children.Add(sceneNodeVisual3D);


            //
            // 5)
            //
            // The following code shows how to load a texture by using the TextureLoader

            if (MainDXViewportView.DXScene != null)
            {
                var planeGeometry3D  = new Ab3d.Meshes.PlaneMesh3D(new Point3D(0, 0, 0), new Vector3D(0, 1, 0), new Vector3D(1, 0, 0), new Size(80, 80), 1, 1).Geometry;
                var dxMeshGeometry3D = new DXMeshGeometry3D(planeGeometry3D);
                _disposables.Add(dxMeshGeometry3D);

                // Load a texture file into a ShaderResourceView (in our case we load a dds file, but we could also load a png file)
                string textureFileName = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources/ab4d-logo-220x220.dds");


                // The easiest way to load an image file and at the same time create a material with the loaded texture is to use the CreateStandardTextureMaterial method.
                var standardMaterial = Ab3d.DirectX.TextureLoader.CreateStandardTextureMaterial(MainDXViewportView.DXScene.DXDevice, textureFileName);

                // We need to manually dispose the created StandardMaterial and ShaderResourceView
                _disposables.Add(standardMaterial);
                _disposables.Add(standardMaterial.DiffuseTextures[0]);


                // If we want more control over the material creation process, we can use the following code:

                //// To load a texture from file, you can use the TextureLoader.LoadShaderResourceView (this supports loading standard image files and also loading dds files).
                //// This method returns a ShaderResourceView and it can also set a textureInfo parameter that defines some of the properties of the loaded texture (bitmap size, dpi, format, hasTransparency).
                //TextureInfo textureInfo;
                //var loadedShaderResourceView = Ab3d.DirectX.TextureLoader.LoadShaderResourceView(MainDXViewportView.DXScene.Device,
                //                                                                                 textureFileName,
                //                                                                                 out textureInfo);
                //_disposables.Add(loadedShaderResourceView);

                //// Get recommended BlendState based on HasTransparency and HasPreMultipliedAlpha values.
                //// Possible values are: CommonStates.Opaque, CommonStates.PremultipliedAlphaBlend or CommonStates.NonPremultipliedAlphaBlend.
                //var recommendedBlendState = MainDXViewportView.DXScene.DXDevice.CommonStates.GetRecommendedBlendState(textureInfo.HasTransparency, textureInfo.HasPremultipliedAlpha);

                //// Now we can create a DXEngine's StandardMaterial
                //var standardMaterial = new StandardMaterial()
                //{
                //    // Set ShaderResourceView into array of diffuse textures
                //    DiffuseTextures = new ShaderResourceView[] {loadedShaderResourceView},
                //    TextureBlendState = recommendedBlendState,

                //    HasTransparency = textureInfo.HasTransparency,

                //    // When showing a texture, the DiffuseColor represents a color mask - each color from the texture is multiplied by the DiffuseColor (White preserves the original colors)
                //    DiffuseColor = Colors.White.ToColor3()
                //};

                //_disposables.Add(standardMaterial);


                meshObjectNode      = new Ab3d.DirectX.MeshObjectNode(dxMeshGeometry3D, standardMaterial);
                meshObjectNode.Name = "MeshObjectNode-from-PlaneMesh3D";

                _disposables.Add(meshObjectNode);

                sceneNodeVisual3D           = new SceneNodeVisual3D(meshObjectNode);
                sceneNodeVisual3D.Transform = new TranslateTransform3D(0, 0, 100);

                MainViewport.Children.Add(sceneNodeVisual3D);
            }



            // Add PointLight
            var pointLight = new PointLight(Colors.White, new Point3D(100, 500, 0));

            MainViewport.Children.Add(pointLight.CreateModelVisual3D());

            Camera1.ShowCameraLight = ShowCameraLightType.Never;
        }

        private void CreateScene()
        {
            var boxMeshGeometry3D = new Ab3d.Meshes.BoxMesh3D(new Point3D(0, 0, 0), new Size3D(BoxSize, BoxSize, BoxSize), 1, 1, 1).Geometry;

            _oneMeshTriangleIndicesCount = boxMeshGeometry3D.TriangleIndices.Count;


            PositionNormalTexture[] vertexBuffer;
            int[] indexBuffer;

            CreateMultiMeshBuffer(center: new Vector3(0, 0, 0),
                                  size: new Vector3(XCount * (BoxSize + BoxesMargin), YCount * (BoxSize + BoxesMargin), ZCount * (BoxSize + BoxesMargin)),
                                  xCount: XCount, yCount: YCount, zCount: ZCount,
                                  meshGeometry3D: boxMeshGeometry3D,
                                  vertexBuffer: out vertexBuffer,
                                  indexBuffer: out indexBuffer);

            _multiMaterialMesh = new SimpleMesh <PositionNormalTexture>(vertexBuffer, indexBuffer,
                                                                        inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate);


            _indexBufferLength = indexBuffer.Length;

            // _firstColorIndex is at 1/4 of the index buffer (at about 1/4 of the height of the boxes)
            _firstColorIndex = (int)(_indexBufferLength / 4);

            // _secondColorIndex is at 3/4 of the index buffer (at about 3/4 of the height)
            _secondColorIndex = _firstColorIndex * 3;
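
            // Note: a triangle list uses 3 indices per triangle, so if the color change should fall exactly
            // on a triangle boundary, _firstColorIndex and _secondColorIndex can be rounded to a multiple of 3.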

            _multiMaterialMesh.SubMeshes = new SubMesh[]
            {
                new SubMesh("SubMesh1")
                {
                    MaterialIndex = 0, StartIndexLocation = 0, IndexCount = _firstColorIndex
                },
                new SubMesh("SubMesh2")
                {
                    MaterialIndex = 1, StartIndexLocation = _firstColorIndex, IndexCount = _secondColorIndex - _firstColorIndex
                },
                new SubMesh("SubMesh3")
                {
                    MaterialIndex = 2, StartIndexLocation = _secondColorIndex, IndexCount = _indexBufferLength - _secondColorIndex
                },
            };

            _disposables.Add(_multiMaterialMesh);


            var materials = new Ab3d.DirectX.Material[]
            {
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.DimGray.ToColor3()
                },
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.Silver.ToColor3()
                },
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.Gold.ToColor3()
                },
            };

            _meshObjectNode = new Ab3d.DirectX.MeshObjectNode(_multiMaterialMesh, materials);

            _disposables.Add(_meshObjectNode);

            // Use SceneNodeVisual3D to show SceneNode in DXViewportView
            var sceneNodeVisual3D = new SceneNodeVisual3D(_meshObjectNode);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }
Example #3
        private void CreateScene()
        {
            int xCount = 40;
            int yCount = 1;
            int zCount = 40;

            float sphereRadius = 10;
            float sphereMargin = 10;

            var sphereMeshGeometry3D = new Ab3d.Meshes.SphereMesh3D(new Point3D(0, 0, 0), sphereRadius, 10).Geometry;

            _oneMeshTriangleIndicesCount = sphereMeshGeometry3D.TriangleIndices.Count;


            PositionNormalTexture[] vertexBuffer;
            int[] indexBuffer;

            var size = new Vector3(xCount * (sphereRadius + sphereMargin), yCount * (sphereRadius + sphereMargin), zCount * (sphereRadius + sphereMargin));

            SubMeshesSample.CreateMultiMeshBuffer(center: new Vector3(0, 0, 0),
                                                  size: size,
                                                  xCount: xCount, yCount: yCount, zCount: zCount,
                                                  meshGeometry3D: sphereMeshGeometry3D,
                                                  vertexBuffer: out vertexBuffer,
                                                  indexBuffer: out indexBuffer);

            _multiMaterialMesh = new SimpleMesh <PositionNormalTexture>(vertexBuffer, indexBuffer,
                                                                        inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate);


            // Create all 3 SubMeshes at the beginning.
            // Though at first only the first SubMesh will be rendered (the other two have IndexCount set to 0),
            // this will allow us to simply change the StartIndexLocation and IndexCount of the SubMeshes
            // to show the selected part without adding or removing any SubMesh (that would regenerate the RenderingQueues).
            // This way the selection is almost a no-op (only changing a few integer values and rendering the scene again).
            _multiMaterialMesh.SubMeshes = new SubMesh[]
            {
                // The first sub-mesh will render the triangles from the beginning of the index buffer to the start of the selection (or all triangles if there is no selection)
                new SubMesh("MainSubMesh1")
                {
                    MaterialIndex = 0, StartIndexLocation = 0, IndexCount = indexBuffer.Length
                },

                // The second sub-mesh will render the triangles after the selection (it follows the first one to preserve the same material)
                new SubMesh("MainSubMesh2")
                {
                    MaterialIndex = 0, StartIndexLocation = 0, IndexCount = 0
                },

                // The third sub-mesh will render selected triangles and will use the second material for that.
                new SubMesh("SelectionSubMesh")
                {
                    MaterialIndex = 1, StartIndexLocation = 0, IndexCount = 0
                },
            };
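
            // A minimal sketch of how a selection update could look later on (selectionStart and selectionCount
            // are hypothetical values that would come from hit testing and should be multiples of 3):
            //
            //   _multiMaterialMesh.SubMeshes[0].IndexCount         = selectionStart;
            //   _multiMaterialMesh.SubMeshes[1].StartIndexLocation = selectionStart + selectionCount;
            //   _multiMaterialMesh.SubMeshes[1].IndexCount         = indexBuffer.Length - (selectionStart + selectionCount);
            //   _multiMaterialMesh.SubMeshes[2].StartIndexLocation = selectionStart;
            //   _multiMaterialMesh.SubMeshes[2].IndexCount         = selectionCount;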

            _disposables.Add(_multiMaterialMesh);

            // Create an OctTree from the vertexBuffer.
            // This will significantly improve hit testing performance (check this by uncommenting the dxScene.GetClosestHitObject call in the OnMouseMouse method).
            _octTree = new OctTree(vertexBuffer, indexBuffer);


            var materials = new Ab3d.DirectX.Material[]
            {
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.Green.ToColor3()
                },
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.Red.ToColor3()
                }
            };

            _meshObjectNode = new Ab3d.DirectX.MeshObjectNode(_multiMaterialMesh, materials);

            _disposables.Add(_meshObjectNode);

            // Use SceneNodeVisual3D to show SceneNode in DXViewportView
            var sceneNodeVisual3D = new SceneNodeVisual3D(_meshObjectNode);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }
Example #4
        private void AddSimpleMesh()
        {
            // To show _meshGeometry3D by using low-level DXEngine objects, we will do the following:
            // 1) Create an array of PositionNormalTexture data - this will represent a managed vertex buffer array.
            // 2) Create a SimpleMesh<PositionNormalTexture> object that will create an unmanaged vertex buffer from the managed vertex buffer.
            // 3) Create a MeshObjectNode (derived from SceneNode) from the SimpleMesh.
            // 4) Create a SceneNodeVisual3D that will allow us to add the MeshObjectNode to the Viewport3D children.


            // 1) Create an array of PositionNormalTexture data - this will represent a managed vertex buffer array.

            int positionsCount = _meshGeometry3D.Positions.Count;

            _vertexBufferArray = new PositionNormalTexture[positionsCount];
            FillVertexBuffer(_vertexBufferArray, _meshGeometry3D.Positions, _meshGeometry3D.Normals, _meshGeometry3D.TextureCoordinates);

            var indexBuffer = new int[_meshGeometry3D.TriangleIndices.Count];

            _meshGeometry3D.TriangleIndices.CopyTo(indexBuffer, 0);


            // 2) Create a SimpleMesh<PositionNormalTexture> object that will create an unmanaged vertex buffer from managed vertex buffer.

            bool createDynamicVertexBuffer = UseDynamicBufferCheckBox.IsChecked ?? false;

            _simpleMesh = new SimpleMesh <PositionNormalTexture>(_vertexBufferArray,
                                                                 indexBuffer,
                                                                 inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate,
                                                                 name: "SimpleMesh-from-PositionNormalTexture-array",
                                                                 createDynamicVertexBuffer: createDynamicVertexBuffer);

            // We can also manually specify the bounds of the mesh.
            // If this is not done, then the SimpleMesh will go through all positions and calculate the bounds itself.
            // But because the bounds are already calculated by the MeshGeometry3D, we can just use that value (we only need to convert it to DXEngine's Bounds type).
            _simpleMesh.Bounds = _meshGeometry3D.Bounds.ToDXEngineBounds();

            _originalMeshSizeY = _simpleMesh.Bounds.BoundingBox.Maximum.Y - _simpleMesh.Bounds.BoundingBox.Minimum.Y;

            _disposables.Add(_simpleMesh);


            var diffuseMaterial = new DiffuseMaterial(Brushes.Silver);
            var dxMaterial      = new Ab3d.DirectX.Materials.WpfMaterial(diffuseMaterial);

            _disposables.Add(dxMaterial);


            // 3) Create a MeshObjectNode (derived from SceneNode) from the SimpleMesh.

            _meshObjectNode      = new Ab3d.DirectX.MeshObjectNode(_simpleMesh, dxMaterial);
            _meshObjectNode.Name = "MeshObjectNode-from-SimpleMesh";

            _disposables.Add(_meshObjectNode);


            // 4) Create a SceneNodeVisual3D that will allow us to add the MeshObjectNode to the Viewport3D children.

            var sceneNodeVisual3D = new SceneNodeVisual3D(_meshObjectNode);


            // Scale and translate the sceneNodeVisual3D and then add it to the scene
            AddVisual3D(sceneNodeVisual3D);
        }
Example #5
        private void InitializePointCloud(Vector3[] positions, BoundingBox positionsBounds, Color4[] positionColors, bool useOptimizedPointMesh, bool disableDepthRead, bool disableDepthWrite)
        {
            if (MainDXViewportView.DXScene == null)
            {
                return; // If this happens, then this method is called too soon (before DXEngine is initialized) or we are using WPF 3D
            }
            // First, set up the material:

            // Create a new PixelMaterial
            _pixelMaterial = new PixelMaterial()
            {
                PixelColor  = Color4.White, // When using PixelColors, PixelColor is used as a mask (multiplied with each color)
                PixelSize   = 2,
                PixelColors = positionColors,

                // By default the graphics card renders objects that are closer to the camera over the objects that are farther away.
                // This means that positions that are closer to the camera will be rendered over the positions that are farther away.
                // This may distort the shown colors.
                // Therefore, when using pixel colors it is better to disable depth buffer checking and render all the pixels.
                // This is done by setting ReadZBuffer and WriteZBuffer to false.
                ReadZBuffer  = !disableDepthRead,
                WriteZBuffer = !disableDepthWrite
            };

            _pixelMaterial.InitializeResources(MainDXViewportView.DXScene.DXDevice);

            _disposables.Add(_pixelMaterial);


            // Now set up the mesh and create SceneNode to show it

            if (useOptimizedPointMesh)
            {
                _optimizedPointMesh = new OptimizedPointMesh <Vector3>(positions,
                                                                       positionsBounds,
                                                                       segmentsCount: 100);

                // NOTE that you can also use an OptimizedPointMesh that takes a more complex vertex struct, for example PositionColor or PositionNormal. In that case use the other constructor.

                _optimizedPointMesh.OptimizationIndicesNumberThreshold = 100000; // We are satisfied with reducing the number of shown positions to 100000 (no need to optimize further - a higher number reduces the initialization time)
                _optimizedPointMesh.MaxOptimizationViewsCount          = 10;     // Maximum number of created data sub-sets. The actual number can be lower when we hit the OptimizationIndicesNumberThreshold or when all vertices need to be shown.

                _optimizedPointMesh.Optimize(new SharpDX.Size2(MainDXViewportView.DXScene.Width, MainDXViewportView.DXScene.Height), standardPointSize: 1);

                _optimizedPointMesh.InitializeResources(MainDXViewportView.DXScene.DXDevice);

                _disposables.Add(_optimizedPointMesh);


                // To render the OptimizedPointMesh we need to use a CustomRenderableNode that provides a custom rendering callback action.
                var customRenderableNode = new CustomRenderableNode(RenderAction, _optimizedPointMesh.Bounds, _optimizedPointMesh, _pixelMaterial);
                customRenderableNode.Name = "CustomRenderableNode";
                //customRenderableNode.CustomRenderingQueue = MainDXViewportView.DXScene.BackgroundRenderingQueue;

                _disposables.Add(customRenderableNode);

                var sceneNodeVisual3D = new SceneNodeVisual3D(customRenderableNode);
                //sceneNodeVisual3D.Transform = transform;

                MainViewport.Children.Add(sceneNodeVisual3D);
            }
            else
            {
                // Use SimpleMesh - all positions will be always rendered:

                var simpleMesh = new SimpleMesh <Vector3>(vertexBufferArray: positions,
                                                          indexBufferArray: null,
                                                          inputLayoutType: InputLayoutType.Position);

                simpleMesh.PrimitiveTopology = PrimitiveTopology.PointList; // We need to change the default PrimitiveTopology.TriangleList to PointList

                // To correctly set the Camera's Near and Far distance, we need to provide the correct bounds of each shown 3D model.

                // It is highly recommended to manually set the Bounds.
                simpleMesh.Bounds = new Bounds(positionsBounds);

                // if we do not manually set the Bounds, then we need to call CalculateBounds to calculate the bounds
                //simpleMesh.CalculateBounds();

                // We will need to dispose the SimpleMesh
                _disposables.Add(simpleMesh);


                // Now create a new MeshObjectNode
                _meshObjectNode = new Ab3d.DirectX.MeshObjectNode(simpleMesh, _pixelMaterial);

                _disposables.Add(_meshObjectNode);

                // To be able to add the MeshObjectNode (or any other SceneNode) to WPF's Viewport3D,
                // we need to create a SceneNodeVisual3D
                var sceneNodeVisual3D = new SceneNodeVisual3D(_meshObjectNode);

                MainViewport.Children.Add(sceneNodeVisual3D);
            }


            Camera1.TargetPosition = positionsBounds.Center.ToWpfPoint3D();
            Camera1.Distance       = positionsBounds.ToRect3D().GetDiagonalLength();
        }
        private void ShowPositionsArray(Vector3[] positionsArray, float pixelSize, Color4 pixelColor, Bounds positionBounds)
        {
            if (_isUsingPixelsVisual3D)
            {
                // The easiest way to show many pixels is to use PixelsVisual3D.
                var pixelsVisual3D = new PixelsVisual3D()
                {
                    Positions  = positionsArray,
                    PixelColor = pixelColor.ToWpfColor(),
                    PixelSize  = pixelSize
                };

                // It is highly recommended to manually set the PositionsBounds.
                // If this is not done, the bounds are calculated by the DXEngine by checking all the positions.
                pixelsVisual3D.PositionsBounds = positionBounds;

                MainViewport.Children.Add(pixelsVisual3D);

                // !!! IMPORTANT !!!
                // When the PixelsVisual3D is not used any more, it needs to be disposed (we are using a DisposeList to dispose all objects in the Unloaded event handler)
                _disposables.Add(pixelsVisual3D);

                return;
            }


            // The first step in showing the positions from the positionsArray as pixels is to create a SimpleMesh<Vector3>.
            // This will create a DirectX VertexBuffer that will be passed to the shaders.
            var simpleMesh = new SimpleMesh <Vector3>(vertexBufferArray: positionsArray,
                                                      indexBufferArray: null,
                                                      inputLayoutType: InputLayoutType.Position);

            simpleMesh.PrimitiveTopology = PrimitiveTopology.PointList; // We need to change the default PrimitiveTopology.TriangleList to PointList

            // To correctly set the Camera's Near and Far distance, we need to provide the correct bounds of each shown 3D model.

            if (positionBounds != null && !positionBounds.IsEmpty)
            {
                // It is highly recommended to manually set the Bounds.
                simpleMesh.Bounds = positionBounds;
            }
            else
            {
                // if we do not manually set the Bounds, then we need to call CalculateBounds to calculate the bounds
                simpleMesh.CalculateBounds();
            }


            // We will need to dispose the SimpleMesh
            _disposables.Add(simpleMesh);


            // Create a new PixelMaterial
            _pixelMaterial = new PixelMaterial()
            {
                PixelColor = pixelColor,
                PixelSize  = pixelSize
            };

            _pixelMaterial.InitializeResources(MainDXViewportView.DXScene.DXDevice);

            _disposables.Add(_pixelMaterial);


            // Now create a new MeshObjectNode
            _meshObjectNode = new Ab3d.DirectX.MeshObjectNode(simpleMesh, _pixelMaterial);

            _disposables.Add(_meshObjectNode);

            // To be able to add the MeshObjectNode (or any other SceneNode) to WPF's Viewport3D,
            // we need to create a SceneNodeVisual3D
            var sceneNodeVisual3D = new SceneNodeVisual3D(_meshObjectNode);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }
        private void CreateTestModels()
        {
            _rootModelVisual3D = new ModelVisual3D();
            MainViewport3D.Children.Add(_rootModelVisual3D);



            // SphereVisual3D
            _sphereVisual3D = new Ab3d.Visuals.SphereVisual3D()
            {
                CenterPosition = new Point3D(-50, 0, -50),
                Radius         = 30,
                Material       = new DiffuseMaterial(Brushes.Silver)
            };

            _sphereVisual3D.SetName("SphereVisual3D");

            _rootModelVisual3D.Children.Add(_sphereVisual3D);


            var readerObj   = new ReaderObj();
            var teapotModel = readerObj.ReadModel3D(System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources\Models\teapot-hires.obj"));

            Ab3d.Utilities.ModelUtils.CenterAndScaleModel3D(teapotModel, centerPosition: new Point3D(50, 0, -50), finalSize: new Size3D(80, 80, 80), preserveAspectRatio: true);

            _teapotModelVisual3D = new ModelVisual3D()
            {
                Content = teapotModel
            };

            teapotModel.SetName("teapot Model3D");
            _teapotModelVisual3D.SetName("teapot ModelVisual3D");

            _rootModelVisual3D.Children.Add(_teapotModelVisual3D);


            // InstancedMeshGeometryVisual3D
            var boxMesh3D = new Ab3d.Meshes.BoxMesh3D(new Point3D(0, 0, 0), new Size3D(6, 6, 6), 1, 1, 1);

            InstanceData[] instancedData = DXEnginePerformance.InstancedMeshGeometry3DTest.CreateInstancesData(center: new Point3D(-50, 0, 50),
                                                                                                               size: new Size3D(80, 50, 80),
                                                                                                               modelScaleFactor: 1,
                                                                                                               xCount: 5,
                                                                                                               yCount: 1,
                                                                                                               zCount: 5,
                                                                                                               useTransparency: false);

            _instancedMeshGeometryVisual3D = new InstancedMeshGeometryVisual3D(boxMesh3D.Geometry);
            _instancedMeshGeometryVisual3D.InstancesData = instancedData;

            _instancedMeshGeometryVisual3D.SetName("InstancedMeshGeometryVisual3D");
            _rootModelVisual3D.Children.Add(_instancedMeshGeometryVisual3D);



            // MeshObjectNode and SceneNodeVisual3D
            var meshGeometry3D   = new Ab3d.Meshes.PyramidMesh3D(new Point3D(50, -20, 50), new Size3D(80, 40, 80)).Geometry;
            var dxMeshGeometry3D = new Ab3d.DirectX.Models.DXMeshGeometry3D(meshGeometry3D);

            var standardMaterial = new StandardMaterial()
            {
                DiffuseColor = Colors.Gold.ToColor3()
            };

            _pyramidMeshObjectNode = new Ab3d.DirectX.MeshObjectNode(dxMeshGeometry3D, standardMaterial);

            _disposables.Add(dxMeshGeometry3D);
            _disposables.Add(_pyramidMeshObjectNode);

            var sceneNodeVisual3D = new SceneNodeVisual3D(_pyramidMeshObjectNode);

            sceneNodeVisual3D.SetName("SceneNodeVisual3D");
            _rootModelVisual3D.Children.Add(sceneNodeVisual3D);
        }