Example #1
        //private void GenerateHeightMapObject(float[,] heightData, Color4[] positionColorsArray)
        private void GenerateHeightMapSceneNodes(MeshBase heightMapMesh, Ab3d.DirectX.Material dxMaterial)
        {
            var meshObjectNode = new Ab3d.DirectX.MeshObjectNode(heightMapMesh, dxMaterial);

            meshObjectNode.Name = "HeightMeshObjectNode";

            _disposables.Add(meshObjectNode);

            var sceneNodeVisual3D = new SceneNodeVisual3D(meshObjectNode);

            RootContentVisual3D.Children.Add(sceneNodeVisual3D);


            // If you also want to render back faces of the height map you need to create another MeshObjectNode and set its IsBackFaceMaterial to true.
            // You can reuse the mesh. But this still requires almost twice the GPU power.
            var backDiffuseMaterial = new DiffuseMaterial(Brushes.Gray);
            var backDXMaterial      = new Ab3d.DirectX.Materials.WpfMaterial(backDiffuseMaterial);

            meshObjectNode = new Ab3d.DirectX.MeshObjectNode(heightMapMesh, backDXMaterial);
            meshObjectNode.IsBackFaceMaterial = true;
            meshObjectNode.Name = "HeightBackMeshObjectNode";

            _disposables.Add(meshObjectNode);

            sceneNodeVisual3D = new SceneNodeVisual3D(meshObjectNode);
            RootContentVisual3D.Children.Add(sceneNodeVisual3D);
        }
        private void CreateTest3DObjects()
        {
            TestObjectsModelVisual3D.Children.Clear();


            // Reset ReadZBuffer to its default value
            _dxLineMaterial.ReadZBuffer = true;

            var objectMaterial = new DiffuseMaterial(Brushes.Silver);

            // Uncomment the following line to test drawing hidden lines behind transparent objects:
            //objectMaterial = new DiffuseMaterial(new SolidColorBrush(Color.FromArgb(230, 100, 100, 100)));


            CreateCylinderWithCircles(new Point3D(0, 0, -5), 10, 30, objectMaterial);

            CreateBoxWithEdgeLines(new Point3D(0, 10, 40), new Size3D(20, 20, 20), objectMaterial);

            CreateTeapotWireframeModel(new Point3D(0, 10, -50), new Size3D(50, 50, 50), objectMaterial);


            // The following code shows how to create a DXEngine's ScreenSpaceLineNode directly (see DXEngineAdvanced/ScreenSpaceLineNodeSample for more info):
            var positions = new Vector3[2];

            positions[0] = new Vector3(-70, -3, 60);
            positions[1] = new Vector3(70, -3, 60);

            _screenSpaceLineNode = new ScreenSpaceLineNode(positions, isLineStrip: false, isLineClosed: false, lineMaterial: _dxLineMaterial);

            // To add ScreenSpaceLineNode into WPF's objects hierarchy we use SceneNodeVisual3D
            var sceneNodeVisual3D = new SceneNodeVisual3D(_screenSpaceLineNode);

            TestObjectsModelVisual3D.Children.Add(sceneNodeVisual3D);
        }
Example #3
        public CustomRenderingStep4()
        {
            InitializeComponent();


            // Instead of creating a new RenderingStep as in the previous sample,
            // we will use a CustomRenderableNode.
            // This type of SceneNode allows specifying a custom rendering action
            // that is called to render the object.

            var bounds = CustomRenderingStep1.GetSharpDXBoxBounds(); // CustomRenderableNode also requires bounds so that the camera near and far plane calculations can account for the custom data.
            var customRenderableNode = new CustomRenderableNode(CustomRenderAction, bounds);

            // To add CustomRenderableNode to the 3D scene, we need to embed it into a SceneNodeVisual3D
            var sceneNodeVisual3D = new SceneNodeVisual3D(customRenderableNode);

            MainViewport.Children.Add(sceneNodeVisual3D);



            MainDXViewportView.DXSceneInitialized += delegate(object sender, EventArgs args)
            {
                if (MainDXViewportView.DXScene == null) // When DXEngine falls back to WPF 3D rendering, the DXScene is null; we could also check for MainDXViewportView.UsedGraphicsProfile.DriverType != GraphicsProfile.DriverTypes.Wpf3D
                {
                    return;
                }

                InitializeSharpDXRendering(MainDXViewportView.DXScene);
            };

            this.Unloaded += delegate { Dispose(); };
        }
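
        // The CustomRenderAction method referenced above is not included in this snippet.
        // A minimal sketch of how such a callback could look is shown below as a comment.
        // The exact delegate signature is an assumption here - check the CustomRenderableNode constructor overloads for the actual parameters.
        //
        //private void CustomRenderAction(RenderingContext renderingContext, CustomRenderableNode customRenderableNode, object objectToRender)
        //{
        //    // Issue the low-level SharpDX draw calls here, for example set the vertex and index buffers
        //    // on the DirectX device context and call Draw / DrawIndexed
        //    // (see InitializeSharpDXRendering and the previous CustomRenderingStep samples for the actual SharpDX setup).
        //}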
        private void LoadObjFileBackgroundButton_OnClick(object sender, RoutedEventArgs e)
        {
            RootModelVisual3D.Children.Clear();
            GC.Collect();

            if (MainDXViewportView.DXScene == null)
            {
                MessageBox.Show("This sample cannot run with WPF 3D rendering");
                return;
            }

            var dxScene = MainDXViewportView.DXScene;

            Task.Factory.StartNew(() =>
            {
                var stopwatch = new Stopwatch();
                stopwatch.Start();

                // Load model from obj file
                var readerObj   = new Ab3d.ReaderObj();
                var readModel3D = readerObj.ReadModel3D(_objFileName);

                // Scale and position the read model so that its bottom center is at (0,-100,0) and it can fit into 200 x 200 x 200 Rect3D
                Ab3d.Utilities.ModelUtils.PositionAndScaleModel3D(readModel3D, new Point3D(0, -100, 0), PositionTypes.Bottom, new Size3D(200, 200, 200));
                readModel3D.Freeze();

                var createdSceneNode = Ab3d.DirectX.Models.SceneNodeFactory.CreateFromModel3D(readModel3D, null, dxScene);

                // Call InitializeResources to create all required DirectX resources from the background thread
                createdSceneNode.InitializeResources(dxScene);

                stopwatch.Stop();
                _backgroundThreadTime = (float)stopwatch.Elapsed.TotalMilliseconds;

                // Now go to UI thread and create the Visual3D objects and update the scene there.
                // There are two reasons to do that:
                // 1) We cannot create objects that are derived from Visual3D on the background thread (we also cannot freeze them by calling the Freeze method - that is only possible for MeshGeometry3D, Model3D and Material objects).
                // 2) We should not update the scene from the background thread because we do not know when the UI thread is reading the scene.
                Dispatcher.Invoke(DispatcherPriority.Normal, new Action(() =>
                {
                    StartStopwatch();

                    // Create SceneNodeVisual3D that will show the created SceneNode
                    var sceneNodeVisual3D = new SceneNodeVisual3D(createdSceneNode);
                    RootModelVisual3D.Children.Add(sceneNodeVisual3D);

                    MainDXViewportView.Refresh(); // Manually render next frame

                    StopStopwatch(UIThreadTime2TextBlock);
                    BackgroundThreadTimeTextBlock.Text = string.Format("Background Thread time: {0:#,##0.00}ms", _backgroundThreadTime);
                }));
            });


            StartStopwatch();
            StopStopwatch(UIThreadTime2TextBlock);
        }
        private void InitializePointCloud(Vector3[] positions, BoundingBox positionsBounds, Color4[] positionColors)
        {
            if (MainDXViewportView.DXScene == null)
            {
                return; // If this happens, then this method is called too soon (before DXEngine is initialized) or we are using WPF 3D
            }
            // First, set up the material:

            // Create a new PixelMaterial
            _pixelMaterial = new PixelMaterial()
            {
                PixelColor  = Color4.White, // When using PixelColors, PixelColor is used as a mask (multiplied with each color)
                PixelSize   = 2,
                PixelColors = positionColors,
            };

            _pixelMaterial.InitializeResources(MainDXViewportView.DXScene.DXDevice);

            _disposables.Add(_pixelMaterial);


            // Now set up the mesh and create SceneNode to show it
            _optimizedPointMesh = new OptimizedPointMesh <Vector3>(positions,
                                                                   positionsBounds,
                                                                   segmentsCount: 100);

            // NOTE that you can also use OptimizedPointMesh that takes more complex vertex struct for example PositionColor or PositionNormal. In this case use the other constructor.

            _optimizedPointMesh.OptimizationIndicesNumberThreshold = 100000; // We are satisfied with reducing the number of shown positions to 100000 (no need to optimize further - a higher threshold reduces the initialization time)
            _optimizedPointMesh.MaxOptimizationViewsCount          = 10;     // Maximum number of created data sub-sets. The actual number can be lower when we hit the OptimizationIndicesNumberThreshold or when all vertices need to be shown.

            _optimizedPointMesh.Optimize(new SharpDX.Size2(MainDXViewportView.DXScene.Width, MainDXViewportView.DXScene.Height), standardPointSize: 1);

            _optimizedPointMesh.InitializeResources(MainDXViewportView.DXScene.DXDevice);

            _disposables.Add(_optimizedPointMesh);


            // To render OptimizedPointMesh we need to use CustomRenderableNode that provides custom rendering callback action.
            var customRenderableNode = new CustomRenderableNode(RenderAction, _optimizedPointMesh.Bounds, _optimizedPointMesh, _pixelMaterial);

            customRenderableNode.Name = "CustomRenderableNode";
            //customRenderableNode.CustomRenderingQueue = MainDXViewportView.DXScene.BackgroundRenderingQueue;

            _disposables.Add(customRenderableNode);

            var sceneNodeVisual3D = new SceneNodeVisual3D(customRenderableNode);

            //sceneNodeVisual3D.Transform = transform;

            MainViewport.Children.Add(sceneNodeVisual3D);


            Camera1.TargetPosition = positionsBounds.Center.ToWpfPoint3D();
            Camera1.Distance       = positionsBounds.ToRect3D().GetDiagonalLength() * 0.5;
        }
Example #6
        void CreateColoredPolyLine(int count, int layer)
        {
            Vector3 preV    = new Vector3(0, layer, 0);
            Color4  preC    = Color4.White;
            var     points  = new List <Point3D>();
            var     vectors = new List <Vector3>();
            var     colors  = new List <SharpDX.Color4>();

            for (int i = 0; i < count; i++)
            {
                float   max = 1f;
                Vector3 v   = new Vector3(1, layer, 0);
                v.X += preV.X;
                //Console.WriteLine("{0} : {1} {2} {3}", i, v.X, v.Y, v.Z);
                colors.Add(preC);
                points.Add(new Point3D((double)v.X, (double)v.Y, (double)v.Z));
                vectors.Add(v);
                preV = v;
            }

            DisposeList disposables   = new DisposeList();
            float       lineThickness = 5;
            bool        isPolyLine    = false;
            var         lineMaterial  = new PositionColoredLineMaterial()
            {
                LineColor      = Color4.White, // When PositionColors are used, then LineColor is used as a mask - each color is multiplied by LineColor - use White to preserve PositionColors
                LineThickness  = lineThickness,
                PositionColors = colors.ToArray(),
                IsPolyLine     = isPolyLine
            };

            // NOTE: For a connected poly-line (as created here) isLineStrip must be true; when rendering disconnected line segments (multi-lines), set isLineStrip to false
            var screenSpaceLineNode = new ScreenSpaceLineNode(vectors.ToArray(), isLineClosed: false, isLineStrip: true, lineMaterial: lineMaterial);

            if (disposables != null)
            {
                disposables.Add(screenSpaceLineNode);
                disposables.Add(lineMaterial);
            }

            var sceneNodeVisual = new SceneNodeVisual3D(screenSpaceLineNode);

            MainViewport.Children.Add(sceneNodeVisual);
            bool isVisualConnected;
            var  lineSelectorData = new LineSelectorData(points, true);

            lineSelectorData.PositionsTransform3D = Ab3d.Utilities.TransformationsHelper.GetVisual3DTotalTransform(sceneNodeVisual, true, out isVisualConnected);
            _lineSelectorData.Add(lineSelectorData);
            var _data = new WLineRenderData(screenSpaceLineNode, lineSelectorData, lineMaterial, colors);

            lrData.Add(_data);
        }
        private ScreenSpaceLineNode CreateLinesWithLineMesh(Vector3[] linePositions, bool isLineStrip, bool isLineClosed, Color lineColor, float xOffset, out ScreenSpaceLineMesh screenSpaceLineMesh)
        {
            if (linePositions == null || linePositions.Length < 2)
            {
                screenSpaceLineMesh = null;
                return(null);
            }

            // If line is closed but the first position is not the same as the last position, then add the first position as the last one
            if (isLineClosed && linePositions[0] != linePositions[linePositions.Length - 1])
            {
                Array.Resize(ref linePositions, linePositions.Length + 1);
                linePositions[linePositions.Length - 1] = linePositions[0];
            }


            // If we can easily calculate the bounding box from the line positions,
            // it is recommended to specify it in the ScreenSpaceLineMesh constructor.
            // If the boundingBox is not specified, it will be calculated in the ScreenSpaceLineMesh constructor by checking all the positions.
            //
            // NOTE: If the bounding box is not correct, then the camera's near and far planes can be invalid and this can clip some 3D objects at the near or far plane (when DXScene.OptimizeNearAndFarCameraPlanes is true - it is true by default)
            //var boundingBox = new BoundingBox(new Vector3(startX, 0, startZ), new Vector3(startX + linesCount * margin, 0, endZ));

            // Create ScreenSpaceLineMesh - it is used to create DirectX vertex buffer from positions
            screenSpaceLineMesh = new ScreenSpaceLineMesh(linePositions, isLineStrip);

            // When the line positions are changed many times, it is recommended to set CreateDynamicVertexBuffer to true.
            screenSpaceLineMesh.CreateDynamicVertexBuffer = true;

            var lineMaterial = new LineMaterial()
            {
                LineColor     = lineColor.ToColor4(),
                LineThickness = 2
            };

            var screenSpaceLineNode = new ScreenSpaceLineNode(screenSpaceLineMesh, lineMaterial);

            screenSpaceLineNode.Transform = new Transformation(SharpDX.Matrix.Translation(xOffset, 0, 0));

            // To show ScreenSpaceLineNode in DXViewportView we need to put it inside a SceneNodeVisual3D
            var sceneNodeVisual3D = new SceneNodeVisual3D(screenSpaceLineNode);

            MainViewport.Children.Add(sceneNodeVisual3D);

            _disposables.Add(screenSpaceLineMesh);
            _disposables.Add(screenSpaceLineNode);
            _disposables.Add(lineMaterial);

            return(screenSpaceLineNode);
        }
Example #8
        private ScreenSpaceLineNode CreateLinesWithPositions(Vector3[] linePositions, bool isLineStrip, bool isPolyLine, bool isLineClosed, Color lineColor, float xOffset)
        {
            var lineMaterial = CreateLineMaterial(isPolyLine, lineColor);

            var screenSpaceLineNode = new ScreenSpaceLineNode(linePositions, isLineStrip, isLineClosed, lineMaterial);

            screenSpaceLineNode.Transform = new Transformation(SharpDX.Matrix.Translation(xOffset, 0, 0));

            // To show ScreenSpaceLineNode in DXViewportView we need to put it inside a SceneNodeVisual3D
            var sceneNodeVisual3D = new SceneNodeVisual3D(screenSpaceLineNode);

            MainViewport.Children.Add(sceneNodeVisual3D);

            _disposables.Add(screenSpaceLineNode);
            _disposables.Add(lineMaterial);

            return(screenSpaceLineNode);
        }
Example #9
        private void AddLines(Point3D startPosition, int positionsCount, Color lineColor, bool readZBuffer = true, bool writeZBuffer = true, RenderingQueue customRenderingQueue = null)
        {
            Vector3[] positions = new Vector3[positionsCount * 2];
            Vector3   position  = startPosition.ToVector3();

            int index = 0;

            for (int i = 0; i < positionsCount; i++)
            {
                positions[index]     = position;
                positions[index + 1] = position + new Vector3(40, 0, 0);

                index    += 2;
                position += new Vector3(0, 0, 10);
            }

            // The ThickLineEffect that renders the 3D lines can use the ReadZBuffer and WriteZBuffer values from the LineMaterial.
            //
            // When ReadZBuffer is false (true by default), then the line is rendered without checking the depth buffer -
            // so it is always rendered, even if it is behind some other 3D object and should not be visible from the camera.
            //
            // When WriteZBuffer is false (true by default), then when rendering the 3D line, the depth of the line is not
            // written to the depth buffer. So no other object will be hidden by the line, even if that object is behind the line.
            var lineMaterial = new LineMaterial()
            {
                LineColor     = lineColor.ToColor4(),
                LineThickness = 2,
                ReadZBuffer   = readZBuffer,
                WriteZBuffer  = writeZBuffer
            };

            _disposables.Add(lineMaterial);


            var screenSpaceLineNode = new ScreenSpaceLineNode(positions, isLineStrip: false, isLineClosed: false, lineMaterial: lineMaterial);

            // The 3D line also needs to be put into the Background or Overlay rendering queue so that it is rendered before or after the other 3D objects.
            screenSpaceLineNode.CustomRenderingQueue = customRenderingQueue;

            var sceneNodeVisual3D = new SceneNodeVisual3D(screenSpaceLineNode);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }
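
        // A hedged usage sketch (not part of the original sample; the position and color values are only illustrative):
        // the customRenderingQueue parameter can be set to one of the DXScene rendering queues mentioned in the comment above, for example:
        //
        //AddLines(new Point3D(-120, 0, -50), positionsCount: 10, lineColor: Colors.Orange,
        //         readZBuffer: false, writeZBuffer: false,
        //         customRenderingQueue: MainDXViewportView.DXScene.OverlayRenderingQueue);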
Example #10
        private void CreateTestScene(int totalModelsCount)
        {
            Mouse.OverrideCursor = Cursors.Wait;

            // Creating Model3DGroup takes too much time when a lot of objects are created.
            // Therefore we are creating SceneNodes (MeshObjectNode) directly.

            //var model3DGroup = CreateModel3DGroup(boxMesh, new Point3D(0, 0, 0), new Size3D(400, 100, 400), 5, 40, 40, 40);
            //var model3DGroup = CreateModel3DGroup(boxMesh, new Point3D(0, 0, 0), new Size3D(400, 200, 400), 5, 40, 20, 40);
            //var model3DGroup = CreateModel3DGroup(boxMesh, new Point3D(0, 0, 0), new Size3D(400, 200, 400), 10, 10, 5, 10);
            //modelsCount = model3DGroup.Children.Count;

            //MainViewport.Children.Add(model3DGroup.CreateModelVisual3D());

            try
            {
                if (_sceneNodeVisual3D != null)
                {
                    _mainViewport3D.Children.Remove(_sceneNodeVisual3D);
                    _sceneNodeVisual3D.SceneNode.Dispose();
                }


                var boxMesh = new BoxMesh3D(new Point3D(0, 0, 0), new Size3D(1, 1, 1), 1, 1, 1).Geometry;

                int modelsXZCount = totalModelsCount < 2500 ? 10 : 50;
                int modelsYCount  = totalModelsCount / (modelsXZCount * modelsXZCount);

                var sceneNode = CreateSceneNodes(boxMesh, new Point3D(0, 0, 0), new Size3D(500, modelsYCount * 10, 500), 5, modelsXZCount, modelsYCount, modelsXZCount);

                _objectsCount = sceneNode.ChildNodesCount;

                _sceneNodeVisual3D = new SceneNodeVisual3D(sceneNode);
                _mainViewport3D.Children.Add(_sceneNodeVisual3D);

                _mainDXViewportView.Refresh();
            }
            finally
            {
                Mouse.OverrideCursor = null;
            }
        }
        private void CreateScene()
        {
            // IMPORTANT:
            // Before the Form is closed, we need to dispose all the DXEngine objects that we created (all that implement IDisposable).
            // This means that all materials, Mesh objects and SceneNodes need to be disposed.
            // To make this easier, we can use the DisposeList collection that will hold IDisposable objects.
            _disposables = new DisposeList();
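
            // A minimal disposal sketch (assumed pattern - the actual cleanup code is not part of this snippet):
            // when the control is unloaded, dispose all collected DXEngine resources and the DXViewportView itself, for example:
            //
            //this.Unloaded += delegate
            //{
            //    _disposables.Dispose();       // disposes all IDisposable objects that were added to the DisposeList
            //    MainDXViewportView.Dispose();
            //};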


            //
            // 1)
            //
            // The easiest way to add 3D models to DXEngine's scene is to add WPF's Visual3D objects to Viewport3D.Children collection:

            var pyramidVisual3D = new Ab3d.Visuals.PyramidVisual3D()
            {
                BottomCenterPosition = new Point3D(-100, 0, 0),
                Size     = new Size3D(80, 50, 80),
                Material = new DiffuseMaterial(Brushes.Blue)
            };

            pyramidVisual3D.SetName("PyramidVisual3D");

            MainViewport.Children.Add(pyramidVisual3D);


            // We could also start from PyramidMesh3D and then create GeometryModel3D and ModelVisual3D
            //var pyramidMeshGeometry3D = new Ab3d.Meshes.PyramidMesh3D(new Point3D(100, 0, 0), new Size3D(80, 50, 80)).Geometry;

            //if (pyramidMeshGeometry3D.Normals.Count == 0)
            //    pyramidMeshGeometry3D.Normals = Ab3d.Utilities.MeshUtils.CalculateNormals(pyramidMeshGeometry3D);

            //var geometryModel3D = new GeometryModel3D(pyramidMeshGeometry3D, diffuseMaterial);
            //var modelVisual3D = new ModelVisual3D()
            //{
            //    Content = geometryModel3D
            //};

            //MainViewport.Children.Add(modelVisual3D);



            // DXEngine internally converts WPF objects into SceneNodes.
            // You can get the string that describes the SceneNodes by opening the Visual Studio Immediate Window and executing the following:
            // MainDXViewportView.DXScene.DumpSceneNodes();
            //
            // Usually this is the best way to define the 3D scene.
            //
            // But if you have very complex objects with a lot of positions, it might be good to create the SceneNodes manually.
            // This allows faster initialization because WPF 3D objects are not created.
            // Also all the memory used by WPF 3D objects can be freed.
            //
            // Because WPF uses the double type for Point3D and Vector3D instead of float as in DirectX and DXEngine,
            // the memory size required for a 3D object in WPF is almost twice the size of what is required in DXEngine.
            //
            // For example, if your object has 100.000 positions, the memory requirements are the following:
            //
            // In WPF:
            // Positions:           100.000 * 3 (x,y,z) * 8 (8 bytes for one double value) = 2.400.000 bytes
            // Normals:             100.000 * 3 (x,y,z) * 8 (8 bytes for one double value) = 2.400.000 bytes
            // Texture coordinates: 100.000 * 2 (u,v) * 8 (8 bytes for one double value)   = 1.600.000 bytes
            // Triangle indices:    100.000 * 4 (4 bytes for one Int32)                    =   400.000 bytes (the actual number of triangle indices may be different - it depends on how many positions are shared between triangles)
            // TOTAL:                                                                      = 6.800.000 bytes = 6.7 MB
            //
            // In DXEngine:
            // Positions:           100.000 * 3 (x,y,z) * 4 (4 bytes for one float value) = 1.200.000 bytes
            // Normals:             100.000 * 3 (x,y,z) * 4 (4 bytes for one float value) = 1.200.000 bytes
            // Texture coordinates: 100.000 * 2 (u,v) * 4 (4 bytes for one float value)   =   800.000 bytes
            // Triangle indices:    100.000 * 4 (4 bytes for one Int32)                   =   400.000 bytes
            // TOTAL:                                                                     = 3.600.000 bytes = 3.5 MB
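            //
            // A quick arithmetic check of the totals above (illustrative only; positionsCount, wpfBytes and dxBytes
            // are hypothetical local names and not part of this sample):
            //
            //int  positionsCount = 100000;
            //long wpfBytes = positionsCount * (3 * 8 + 3 * 8 + 2 * 8) + positionsCount * 4; // = 6.800.000 bytes
            //long dxBytes  = positionsCount * (3 * 4 + 3 * 4 + 2 * 4) + positionsCount * 4; // = 3.600.000 bytes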
            //
            // Usually both objects need to be initialized (takes CPU time) and are stored in memory.
            //
            //
            // When the DXEngine's SceneNodes are manually created, the WPF objects can be cleared from memory,
            // or the SceneNodes can even be created without the intermediate WPF objects.
            //
            // Once the SceneNode is created, it can be added to the scene by using a SceneNodeVisual3D.
            // This is a Visual3D and can be added to the Viewport3D.Children collection.
            // The SceneNodeVisual3D also provides a way to add a Transformation to the SceneNode.
            //
            // A disadvantage of creating SceneNodes is that such objects cannot be shown when WPF 3D rendering is used (for example, when DXEngine falls back to WPF 3D rendering because of problems with DirectX initialization).
            // Another disadvantage is that it is more complicated to create and modify SceneNodes.
            //
            // Usually, when memory usage is not problematic, it is better to use standard WPF 3D objects.

            //
            // 2)
            //
            // Create MeshObjectNode from GeometryMesh with providing arrays (IList<T>) for positions, normals, textureCoordinates and triangleIndices:

            Vector3[] positions;
            Vector3[] normals;
            Vector2[] textureCoordinates;
            int[]     triangleIndices;

            // Get Pyramid mesh data
            GetObjectDataArrays(out positions, out normals, out textureCoordinates, out triangleIndices);


            // The easiest way to create DXEngine's material is to use Ab3d.DirectX.Materials.WpfMaterial that takes a WPF material and converts it into DXEngine's material
            var diffuseMaterial = new DiffuseMaterial(Brushes.Green);
            var dxMaterial      = new Ab3d.DirectX.Materials.WpfMaterial(diffuseMaterial);

            _disposables.Add(dxMaterial);

            // Create SceneNode
            // First create GeometryMesh object from the mesh arrays
            var geometryMesh = new Ab3d.DirectX.GeometryMesh(positions, normals, textureCoordinates, triangleIndices, "PyramidMesh3D");

            _disposables.Add(geometryMesh);

            // NOTE:
            // We could also create GeometryMesh from WPF's MeshGeometry with help from DXMeshGeometry3D:
            //var wpfPyramidMesh = new Meshes.PyramidMesh3D(bottomCenterPosition: new System.Windows.Media.Media3D.Point3D(0, 0, 0),
            //                                              size: new System.Windows.Media.Media3D.Size3D(30, 20, 10));

            //var geometryMesh = new Ab3d.DirectX.Models.DXMeshGeometry3D(wpfPyramidMesh.Geometry, "PyramidMesh");


            // Use GeometryMesh to create MeshObjectNode (SceneNode from GeometryMesh object)
            var meshObjectNode = new Ab3d.DirectX.MeshObjectNode(geometryMesh, dxMaterial);

            meshObjectNode.Name = "Green-MeshObjectNode-from-GeometryMesh";

            _disposables.Add(meshObjectNode);

            // Use SceneNodeVisual3D to show SceneNode in DXViewportView
            var sceneNodeVisual3D = new SceneNodeVisual3D(meshObjectNode);

            //sceneNodeVisual3D.Transform = new TranslateTransform3D(0, 0, 0);

            MainViewport.Children.Add(sceneNodeVisual3D);


            //
            // 3)
            //
            // Create MeshObjectNode from SimpleMesh<T> by providing a VertexBufferArray and an IndexBufferArray:
            // This option provides faster initialization, because the VertexBufferArray is already generated and can be directly used to create the DirectX vertex buffer.
            // In the previous case the VertexBufferArray was generated inside the GeometryMesh from the positions, normals and textureCoordinates arrays.
            //
            // If you can store your 3D models on disk (or in some other location) in the form of a VertexBuffer and an IndexBuffer,
            // then this is the fastest way to initialize 3D objects.

            //
            // 3a)
            //
            // The standard way to create a SimpleMesh is to use PositionNormalTexture or some other struct that defines the data for one vertex:

            PositionNormalTexture[] vertexBuffer;
            int[] indexBuffer;
            GetVertexAndIndexBuffer(out vertexBuffer, out indexBuffer);

            var simpleMesh = new SimpleMesh <PositionNormalTexture>(vertexBuffer,
                                                                    indexBuffer,
                                                                    inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate,
                                                                    name: "SimpleMesh-from-PositionNormalTexture-array");

            _disposables.Add(simpleMesh);

            diffuseMaterial = new DiffuseMaterial(Brushes.Red);
            dxMaterial      = new Ab3d.DirectX.Materials.WpfMaterial(diffuseMaterial);

            _disposables.Add(dxMaterial);

            _redPyramidObjectNode      = new Ab3d.DirectX.MeshObjectNode(simpleMesh, dxMaterial);
            _redPyramidObjectNode.Name = "Red-MeshObjectNode-from-SimpleMesh";

            _disposables.Add(_redPyramidObjectNode);

            sceneNodeVisual3D           = new SceneNodeVisual3D(_redPyramidObjectNode);
            sceneNodeVisual3D.Transform = new TranslateTransform3D(100, 0, 0);

            MainViewport.Children.Add(sceneNodeVisual3D);



            //
            // 3b)
            //
            // It is also possible to create a SimpleMesh with a base type - for example float (for example when the data is read from a file).
            // In this case we need to set the ArrayStride property.
            //
            // A drawback of using a non-standard vertex buffer (one that is not Vector3, PositionNormalTexture, PositionNormal or PositionTexture)
            // is that such a mesh does not support hit testing.
            // In this sample this is demonstrated by the camera rotation around the mouse-hit object - it is not possible to rotate around the SimpleMesh<float>.

            float[] floatVertexBuffer;
            GetFloatVertexAndIndexBuffer(out floatVertexBuffer, out indexBuffer);

            var floatSimpleMesh = new SimpleMesh <float>(floatVertexBuffer,
                                                         indexBuffer,
                                                         inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate,
                                                         name: "SimpleMesh-from-float-array");

            _disposables.Add(floatSimpleMesh);

            // IMPORTANT:
            // When we do not use PositionNormalTexture or PositionNormal, the DXEngine cannot calculate Bounds of the SimpleMesh for us.
            // In this case we need to calculate and specify Bounds manually:
            // Defined bounds for the following mesh: new Ab3d.Meshes.PyramidMesh3D(new Point3D(0, 0, 0), new Size3D(80, 50, 80))
            floatSimpleMesh.Bounds = new Bounds(new BoundingBox(minimum: new Vector3(-40, -25, -40), maximum: new Vector3(40, 25, 40)));

            // Because we created SimpleMesh with a base type (float),
            // we need to specify how many array elements define one Vertex.
            // This is 8 in our case: 3 (position x,y,z) + 3 (normal x,y,z) + 2 (texture coordinate u,v) = 8
            floatSimpleMesh.ArrayStride = 8;
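
            // For illustration, one vertex in the floatVertexBuffer is therefore laid out as 8 consecutive floats
            // (assumed ordering based on the InputLayoutType flags used above):
            // [ position.X, position.Y, position.Z,   normal.X, normal.Y, normal.Z,   textureCoordinate.U, textureCoordinate.V ]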


            diffuseMaterial = new DiffuseMaterial(Brushes.Orange);
            dxMaterial      = new Ab3d.DirectX.Materials.WpfMaterial(diffuseMaterial);

            _disposables.Add(dxMaterial);

            _orangePyramidObjectNode      = new Ab3d.DirectX.MeshObjectNode(floatSimpleMesh, dxMaterial);
            _orangePyramidObjectNode.Name = "Orange-MeshObjectNode-from-FloatSimpleMesh";

            _disposables.Add(_orangePyramidObjectNode);

            sceneNodeVisual3D           = new SceneNodeVisual3D(_orangePyramidObjectNode);
            sceneNodeVisual3D.Transform = new TranslateTransform3D(200, 0, 0);

            MainViewport.Children.Add(sceneNodeVisual3D);



            //
            // 3c)
            //
            // Instead of float array elements, it is also possible to use a byte array to create the SimpleMesh.
            //
            // As before, a drawback of using a non-standard vertex buffer (one that is not Vector3, PositionNormalTexture, PositionNormal or PositionTexture)
            // is that such a mesh does not support hit testing.
            // In this sample this is demonstrated by the camera rotation around the mouse-hit object - it is not possible to rotate around the SimpleMesh<byte>.

            byte[] byteVertexBuffer;
            GetByteVertexAndIndexBuffer(out byteVertexBuffer, out indexBuffer);

            var byteSimpleMesh = new SimpleMesh <byte>(byteVertexBuffer,
                                                       indexBuffer,
                                                       inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate,
                                                       name: "SimpleMesh-from-byte-array");

            _disposables.Add(byteSimpleMesh);

            // IMPORTANT:
            // When we do not use PositionNormalTexture or PositionNormal, the DXEngine cannot calculate Bounds of the SimpleMesh for us.
            // In this case we need to calculate and specify Bounds manually:
            // Defined bounds for the following mesh: new Ab3d.Meshes.PyramidMesh3D(new Point3D(0, 0, 0), new Size3D(80, 50, 80))
            byteSimpleMesh.Bounds = new Bounds(new BoundingBox(minimum: new Vector3(-40, -25, -40), maximum: new Vector3(40, 25, 40)));

            // Because we created SimpleMesh with a base type (byte),
            // we need to specify how many array elements define one Vertex.
            // This is 32 in our case: 8 (8x float value) * 4 (4 bytes for one float) = 32
            byteSimpleMesh.ArrayStride = 32;


            diffuseMaterial = new DiffuseMaterial(Brushes.Yellow);
            dxMaterial      = new Ab3d.DirectX.Materials.WpfMaterial(diffuseMaterial);

            _disposables.Add(dxMaterial);

            meshObjectNode      = new Ab3d.DirectX.MeshObjectNode(byteSimpleMesh, dxMaterial);
            meshObjectNode.Name = "Yellow-MeshObjectNode-from-ByteSimpleMesh";

            _disposables.Add(meshObjectNode);

            sceneNodeVisual3D           = new SceneNodeVisual3D(meshObjectNode);
            sceneNodeVisual3D.Transform = new TranslateTransform3D(300, 0, 0);

            MainViewport.Children.Add(sceneNodeVisual3D);


            //
            // 4)
            //
            // When a frozen Model3DGroup is added to the DXViewportView, it is converted into the WpfOptimizedModel3DGroupNode (derived from SceneNode).
            // In this case both WPF and DXEngine's 3D objects data are stored in memory.
            //
            // To release the WPF 3D objects data, it is possible to create the WpfOptimizedModel3DGroupNode manually and
            // then clear the used WPF 3D objects.
            // This can be done by setting the AutomaticallyClearWpfObjectsAfterInitialization property on WpfOptimizedModel3DGroupNode to true,
            // or by calling the ClearWpfObjects method on WpfOptimizedModel3DGroupNode.

            string dragonModelFileName = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Resources\\Models\\dragon_vrip_res3.obj");

            var     readerObj   = new Ab3d.ReaderObj();
            Model3D readModel3D = readerObj.ReadModel3D(dragonModelFileName);

            double scale = 100 / readModel3D.Bounds.SizeX; // Scale the model to 100 SizeX

            readModel3D.Transform = new ScaleTransform3D(scale, scale, scale);

            var model3DGroup = readModel3D as Model3DGroup;

            if (model3DGroup == null)
            {
                model3DGroup = new Model3DGroup();
                model3DGroup.Children.Add(readModel3D);
            }

            model3DGroup.Freeze();


            var wpfOptimizedModel3DGroupNode = new Ab3d.DirectX.Models.WpfOptimizedModel3DGroupNode(model3DGroup, name: "Frozen Model3DGroup");

            wpfOptimizedModel3DGroupNode.AutomaticallyClearWpfObjectsAfterInitialization = true; // This will clear the WPF 3D models that are referenced by WpfOptimizedModel3DGroupNode when the DirectX objects are created

            _disposables.Add(wpfOptimizedModel3DGroupNode);

            sceneNodeVisual3D           = new SceneNodeVisual3D(wpfOptimizedModel3DGroupNode);
            sceneNodeVisual3D.Transform = new TranslateTransform3D(-100, -20, -100);

            MainViewport.Children.Add(sceneNodeVisual3D);


            //
            // 5)
            //
            // The following code shows how to load texture with using TextureLoader

            if (MainDXViewportView.DXScene != null)
            {
                var planeGeometry3D  = new Ab3d.Meshes.PlaneMesh3D(new Point3D(0, 0, 0), new Vector3D(0, 1, 0), new Vector3D(1, 0, 0), new Size(80, 80), 1, 1).Geometry;
                var dxMeshGeometry3D = new DXMeshGeometry3D(planeGeometry3D);
                _disposables.Add(dxMeshGeometry3D);

                // Load texture file into ShaderResourceView (in our case we load dds file; but we could also load png file)
                string textureFileName = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources/ab4d-logo-220x220.dds");


                // The easiest way to load an image file and at the same time create a material with the loaded texture is to use the CreateStandardTextureMaterial method.
                var standardMaterial = Ab3d.DirectX.TextureLoader.CreateStandardTextureMaterial(MainDXViewportView.DXScene.DXDevice, textureFileName);

                // We need to manually dispose the created StandardMaterial and ShaderResourceView
                _disposables.Add(standardMaterial);
                _disposables.Add(standardMaterial.DiffuseTextures[0]);


                // If we want more control over the material creation process, we can use the following code:

                //// To load a texture from file, you can use the TextureLoader.LoadShaderResourceView (this supports loading standard image files and also loading dds files).
                //// This method returns a ShaderResourceView and it can also set a textureInfo parameter that defines some of the properties of the loaded texture (bitmap size, dpi, format, hasTransparency).
                //TextureInfo textureInfo;
                //var loadedShaderResourceView = Ab3d.DirectX.TextureLoader.LoadShaderResourceView(MainDXViewportView.DXScene.Device,
                //                                                                                 textureFileName,
                //                                                                                 out textureInfo);
                //_disposables.Add(loadedShaderResourceView);

                //// Get recommended BlendState based on HasTransparency and HasPreMultipliedAlpha values.
                //// Possible values are: CommonStates.Opaque, CommonStates.PremultipliedAlphaBlend or CommonStates.NonPremultipliedAlphaBlend.
                //var recommendedBlendState = MainDXViewportView.DXScene.DXDevice.CommonStates.GetRecommendedBlendState(textureInfo.HasTransparency, textureInfo.HasPremultipliedAlpha);

                //// Now we can create a DXEngine's StandardMaterial
                //var standardMaterial = new StandardMaterial()
                //{
                //    // Set ShaderResourceView into array of diffuse textures
                //    DiffuseTextures = new ShaderResourceView[] {loadedShaderResourceView},
                //    TextureBlendState = recommendedBlendState,

                //    HasTransparency = textureInfo.HasTransparency,

                //    // When showing texture, the DiffuseColor represents a color mask - each color from texture is multiplied with DiffuseColor (White preserves the original color)
                //    DiffuseColor = Colors.White.ToColor3()
                //};

                //_disposables.Add(standardMaterial);


                meshObjectNode      = new Ab3d.DirectX.MeshObjectNode(dxMeshGeometry3D, standardMaterial);
                meshObjectNode.Name = "MeshObjectNode-from-PlaneMesh3D";

                _disposables.Add(meshObjectNode);

                sceneNodeVisual3D           = new SceneNodeVisual3D(meshObjectNode);
                sceneNodeVisual3D.Transform = new TranslateTransform3D(0, 0, 100);

                MainViewport.Children.Add(sceneNodeVisual3D);
            }



            // Add PointLight
            var pointLight = new PointLight(Colors.White, new Point3D(100, 500, 0));

            MainViewport.Children.Add(pointLight.CreateModelVisual3D());

            Camera1.ShowCameraLight = ShowCameraLightType.Never;
        }
        private void ShowVisibleAndHiddenLines()
        {
            // To show both visible and hidden lines we need to render each line twice:
            // once with standard settings to show the visible part of the line,
            // and once using the HiddenLineMaterial to show the hidden part of the line.


            // Now we will clone the existing 3D lines
            var existingLineVisuals = TestObjectsModelVisual3D.Children.OfType <BaseLineVisual3D>().ToList();

            var newLineVisuals = new List <BaseLineVisual3D>();

            foreach (var lineVisual3D in existingLineVisuals)
            {
                var clonedLineVisual = CloneLineVisuals(lineVisual3D);

                // To correctly show hidden lines, they need to be rendered after the objects in front of the lines
                // (they are rendered only when there are already some objects in front of them - the line's depth is bigger than the current depth value).
                // In case you want to show the hidden lines behind semi-transparent objects, you need to make sure that
                // the lines are put into the OverlayRenderingQueue.
                // This is needed because TransparentRenderingQueue is defined after LineGeometryRenderingQueue
                // and therefore all transparent objects are rendered after all 3D lines (this is needed so the lines are visible through transparent objects).
                // This can be done by using the SetDXAttribute method and setting the CustomRenderingQueue value.
                // Note that this value needs to be set before the line is initialized by the DXEngine - so before the MainDXViewportView.Update call a few lines below.
                // (in case of using ScreenSpaceLineNode, you can set its CustomRenderingQueue).
                //clonedLineVisual.SetDXAttribute(DXAttributeType.CustomRenderingQueue, MainDXViewportView.DXScene.OverlayRenderingQueue);

                TestObjectsModelVisual3D.Children.Add(clonedLineVisual);
                newLineVisuals.Add(clonedLineVisual);
            }

            // After adding new WPF objects to the scene, we need to manually call Update to create DXEngine's SceneNode objects that will be needed later
            MainDXViewportView.Update();

            // We need to update the _sceneNodesDictionary because we have changed the scene
            CreateSceneNodesDictionary();

            // Now change the materials of the cloned lines to _hiddenLineMaterial
            foreach (var newLineVisual3D in newLineVisuals)
            {
                // Now we can change the material to _hiddenLineMaterial.
                //
                // We also need to put the hidden line into the OverlayRenderingQueue.
                // This is needed because to correctly show hidden lines, they need to be rendered after the objects in front of the lines
                // (they are rendered only when there are already some objects in front of them - the line's depth is bigger than the current depth value).
                // In case you want to show the hidden lines behind semi-transparent objects, you need to make sure that
                // the lines are put into the OverlayRenderingQueue.
                // This is needed because TransparentRenderingQueue is defined after LineGeometryRenderingQueue
                // and therefore all transparent objects are rendered after all 3D lines (this is needed so the lines are visible through transparent objects).
                //
                // Here this is done with setting the CustomRenderingQueue on the ScreenSpaceLineNode (see ChangeLineMaterial method).

                ChangeLineMaterial(newLineVisual3D, _hiddenLineMaterial, MainDXViewportView.DXScene.OverlayRenderingQueue);

                // We could also call SetDXAttribute and set the CustomRenderingQueue to OverlayRenderingQueue.
                // This can be done by uncommenting the following line
                // (but this is less efficient than setting the CustomRenderingQueue on the ScreenSpaceLineNode as done in the ChangeLineMaterial):
                //newLineVisual3D.SetDXAttribute(DXAttributeType.CustomRenderingQueue, MainDXViewportView.DXScene.OverlayRenderingQueue);
            }


            if (_wireframeGeometryModel3D != null)
            {
                // Clone the GeometryModel3D that shows teapot wireframe and use hiddenLineMaterial to render it
                var newWpfWireframeMaterial = new DiffuseMaterial(Brushes.Red);
                newWpfWireframeMaterial.SetUsedDXMaterial(_hiddenLineMaterial);

                var geometryModel3D = new GeometryModel3D(_wireframeGeometryModel3D.Geometry, newWpfWireframeMaterial);
                geometryModel3D.Transform = _wireframeGeometryModel3D.Transform;
                var modelVisual3D = new ModelVisual3D()
                {
                    Content = geometryModel3D
                };

                TestObjectsModelVisual3D.Children.Add(modelVisual3D);
            }


            // Create a new ScreenSpaceLineNode from the data for _screenSpaceLineNode
            // Set its material to _hiddenLineMaterial and move it to the OverlayRenderingQueue:
            var hiddenScreenSpaceLineNode = new ScreenSpaceLineNode(_screenSpaceLineNode.Positions, _screenSpaceLineNode.IsLineStrip, _screenSpaceLineNode.IsLineClosed, _hiddenLineMaterial);

            hiddenScreenSpaceLineNode.CustomRenderingQueue = MainDXViewportView.DXScene.OverlayRenderingQueue;

            var sceneNodeVisual3D = new SceneNodeVisual3D(hiddenScreenSpaceLineNode);

            TestObjectsModelVisual3D.Children.Add(sceneNodeVisual3D);
        }
Example #13
        private void SceneTypeComboBox_OnSelectionChanged(object sender, SelectionChangedEventArgs e)
        {
            if (!this.IsLoaded)
            {
                return;
            }


            MainViewport.Children.Clear();
            DisposeInstancedTextNodes();

            InfoTextBox.Text       = "";
            InfoTextBox.Visibility = Visibility.Collapsed;


            if (SceneTypeComboBox.SelectedIndex == 0)
            {
                CreateSimpleDemoScene();

                Camera1.Heading        = 30;
                Camera1.Attitude       = -20;
                Camera1.Distance       = 600;
                Camera1.TargetPosition = new Point3D(0, 0, 0);
            }
            else
            {
                _instancedTextNode = new InstancedTextNode(new FontFamily("Consolas"), FontWeights.Normal, fontBitmapSize: 128);

                // Reset direction
                _instancedTextNode.SetTextDirection(textDirection: new Vector3D(1, 0, 0), upDirection: new Vector3D(0, 1, 0));


                try
                {
                    Mouse.OverrideCursor = Cursors.Wait;

                    if (SceneTypeComboBox.SelectedIndex == 1)
                    {
                        CreateInstanceText(_instancedTextNode, centerPosition: new Point3D(0, 0, 0), size: new Size3D(1000, 500, 2000), xCount: 10, yCount: 20, zCount: 40, textColor: Colors.Black, textSize: 10);
                    }
                    else if (SceneTypeComboBox.SelectedIndex == 2)
                    {
                        CreateInstanceText(_instancedTextNode, centerPosition: new Point3D(0, 0, 0), size: new Size3D(2000, 2000, 2000), xCount: 20, yCount: 100, zCount: 100, textColor: Colors.Black, textSize: 10);
                    }
                    else if (SceneTypeComboBox.SelectedIndex == 3)
                    {
                        CreateInstanceText(_instancedTextNode, centerPosition: new Point3D(0, 0, 0), size: new Size3D(2000, 2000, 10000), xCount: 20, yCount: 100, zCount: 500, textColor: Colors.Black, textSize: 10);
                    }

                    var sceneNodeVisual1 = new SceneNodeVisual3D(_instancedTextNode);
                    MainViewport.Children.Add(sceneNodeVisual1);


                    Camera1.Heading        = -8.2881686066695;
                    Camera1.Attitude       = 3.35596244333162;
                    Camera1.Distance       = 1131.38948539394;
                    Camera1.TargetPosition = new Point3D(67.7795992281885, 14.1717311898692, 1.24683504967857);

                    UpdateCharactersCountInfo();
                }
                finally
                {
                    Mouse.OverrideCursor = null;
                }


                SetupDemoSceneButtons(isDemoSceneShown: false);
            }
        }
Example #14
        private void CreateScene()
        {
            int xCount = 40;
            int yCount = 1;
            int zCount = 40;

            float sphereRadius = 10;
            float sphereMargin = 10;

            var sphereMeshGeometry3D = new Ab3d.Meshes.SphereMesh3D(new Point3D(0, 0, 0), sphereRadius, 10).Geometry;

            _oneMeshTriangleIndicesCount = sphereMeshGeometry3D.TriangleIndices.Count;


            PositionNormalTexture[] vertexBuffer;
            int[] indexBuffer;

            var size = new Vector3(xCount * (sphereRadius + sphereMargin), yCount * (sphereRadius + sphereMargin), zCount * (sphereRadius + sphereMargin));

            SubMeshesSample.CreateMultiMeshBuffer(center: new Vector3(0, 0, 0),
                                                  size: size,
                                                  xCount: xCount, yCount: yCount, zCount: zCount,
                                                  meshGeometry3D: sphereMeshGeometry3D,
                                                  vertexBuffer: out vertexBuffer,
                                                  indexBuffer: out indexBuffer);

            _multiMaterialMesh = new SimpleMesh <PositionNormalTexture>(vertexBuffer, indexBuffer,
                                                                        inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate);


            // Create all 3 SubMeshes at the beginning.
            // Though at first only the first SubMesh will be rendered (the other two have IndexCount set to 0),
            // this will allow us to simply change the StartIndexLocation and IndexCount of the SubMeshes
            // to show the selected part without adding or removing any SubMesh (that would regenerate the RenderingQueues).
            // This way the selection is almost a no-op (only changing a few integer values and rendering the scene again) - see the sketch after the SubMeshes array below.
            _multiMaterialMesh.SubMeshes = new SubMesh[]
            {
                // First sub-mesh will render triangles from the first to the start of selection (or all triangles if there is no selection)
                new SubMesh("MainSubMesh1")
                {
                    MaterialIndex = 0, StartIndexLocation = 0, IndexCount = indexBuffer.Length
                },

                // Second sub-mesh will render triangles after the selection (this one follows the first one to preserve the same material)
                new SubMesh("MainSubMesh2")
                {
                    MaterialIndex = 0, StartIndexLocation = 0, IndexCount = 0
                },

                // The third sub-mesh will render selected triangles and will use the second material for that.
                new SubMesh("SelectionSubMesh")
                {
                    MaterialIndex = 1, StartIndexLocation = 0, IndexCount = 0
                },
            };
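
            // A minimal sketch of how a selection could later be shown by only updating the SubMesh ranges
            // (hypothetical helper - the actual selection code is not part of this snippet; _indexBufferLength is an
            // assumed field that would store indexBuffer.Length so it is available outside of CreateScene):
            //
            //private void ShowSelection(int selectionStartIndexLocation, int selectionIndexCount)
            //{
            //    var subMeshes = _multiMaterialMesh.SubMeshes;
            //
            //    subMeshes[0].IndexCount         = selectionStartIndexLocation;                       // triangles before the selection
            //
            //    subMeshes[1].StartIndexLocation = selectionStartIndexLocation + selectionIndexCount; // triangles after the selection
            //    subMeshes[1].IndexCount         = _indexBufferLength - subMeshes[1].StartIndexLocation;
            //
            //    subMeshes[2].StartIndexLocation = selectionStartIndexLocation;                       // selected triangles (rendered with the second, red material)
            //    subMeshes[2].IndexCount         = selectionIndexCount;
            //}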

            _disposables.Add(_multiMaterialMesh);

            // Create OctTree from vertexBuffer.
            // This will significantly improve hit testing performance (check this by uncommenting the dxScene.GetClosestHitObject call in the OnMouseMouse method).
            _octTree = new OctTree(vertexBuffer, indexBuffer);


            var materials = new Ab3d.DirectX.Material[]
            {
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.Green.ToColor3()
                },
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.Red.ToColor3()
                }
            };

            _meshObjectNode = new Ab3d.DirectX.MeshObjectNode(_multiMaterialMesh, materials);

            _disposables.Add(_meshObjectNode);

            // Use SceneNodeVisual3D to show SceneNode in DXViewportView
            var sceneNodeVisual3D = new SceneNodeVisual3D(_meshObjectNode);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }
        private void GenerateHeightMapObject(float[,] heightData, Color4[] positionColorsArray)
        {
            PositionNormalTexture[] vertexBuffer;
            int[] indexBuffer;

            CreateHeightVertexAndIndexBuffer(heightData,
                                             centerPosition: new Vector3(0, 0, 0),
                                             size: new Vector3(1000, 20, 200),
                                             vertexBuffer: out vertexBuffer,
                                             indexBuffer: out indexBuffer);

            var simpleMesh = new SimpleMesh <PositionNormalTexture>(vertexBuffer,
                                                                    indexBuffer,
                                                                    inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate,
                                                                    name: "HeightSimpleMesh");

            _disposables.Add(simpleMesh);


            Ab3d.DirectX.Material dxMaterial;

            if (positionColorsArray != null)
            {
                dxMaterial = new Ab3d.DirectX.Materials.VertexColorMaterial()
                {
                    PositionColors      = positionColorsArray, // The PositionColors property is used to specify colors for each vertex
                    CreateDynamicBuffer = false,               // We will not update the colors frequently

                    // To show specular effect set the specular data here:
                    //SpecularPower = 16,
                    //SpecularColor = Color3.White,
                    //HasSpecularColor = true
                };
            }
            else
            {
                // Solid color material:
                var diffuseMaterial = new DiffuseMaterial(Brushes.Green);

                // Texture material:
                //var imageBrush = new ImageBrush();
                //imageBrush.ImageSource = new BitmapImage(new Uri("pack://application:,,,/Resources/GrassTexture.jpg"));
                //var diffuseMaterial = new DiffuseMaterial(imageBrush);

                dxMaterial = new Ab3d.DirectX.Materials.WpfMaterial(diffuseMaterial);
            }

            _disposables.Add(dxMaterial);

            var meshObjectNode = new Ab3d.DirectX.MeshObjectNode(simpleMesh, dxMaterial);

            meshObjectNode.Name = "HeightMeshObjectNode";

            _disposables.Add(meshObjectNode);

            var sceneNodeVisual3D = new SceneNodeVisual3D(meshObjectNode);

            MainViewport.Children.Add(sceneNodeVisual3D);


            // If you also want to render back faces of the height map you need to create another MeshObjectNode and set its IsBackFaceMaterial to true.
            // You can reuse the mesh. But this still requires almost twice the GPU power.
            var backDiffuseMaterial = new DiffuseMaterial(Brushes.Gray);
            var backDXMaterial      = new Ab3d.DirectX.Materials.WpfMaterial(backDiffuseMaterial);

            meshObjectNode = new Ab3d.DirectX.MeshObjectNode(simpleMesh, backDXMaterial);
            meshObjectNode.IsBackFaceMaterial = true;
            meshObjectNode.Name = "HeightBackMeshObjectNode";

            _disposables.Add(meshObjectNode);

            sceneNodeVisual3D = new SceneNodeVisual3D(meshObjectNode);
            MainViewport.Children.Add(sceneNodeVisual3D);
        }
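        // The CreateHeightVertexAndIndexBuffer method called above is not shown in this example.
        // The following is only a minimal sketch of how such a method could be implemented.
        // It assumes that PositionNormalTexture exposes Position, Normal and TextureCoordinate members
        // and it uses a constant up-facing normal instead of averaging the face normals.
        private void CreateHeightVertexAndIndexBuffer(float[,] heightData,
                                                      Vector3 centerPosition,
                                                      Vector3 size,
                                                      out PositionNormalTexture[] vertexBuffer,
                                                      out int[] indexBuffer)
        {
            int xCount = heightData.GetLength(0);
            int zCount = heightData.GetLength(1);

            vertexBuffer = new PositionNormalTexture[xCount * zCount];

            for (int x = 0; x < xCount; x++)
            {
                for (int z = 0; z < zCount; z++)
                {
                    // Map the grid cell to the requested size around centerPosition
                    float xPos = centerPosition.X + ((float)x / (xCount - 1) - 0.5f) * size.X;
                    float yPos = centerPosition.Y + heightData[x, z] * size.Y;
                    float zPos = centerPosition.Z + ((float)z / (zCount - 1) - 0.5f) * size.Z;

                    int index = x * zCount + z;
                    vertexBuffer[index].Position          = new Vector3(xPos, yPos, zPos);
                    vertexBuffer[index].Normal            = new Vector3(0, 1, 0); // a real implementation would calculate smooth normals
                    vertexBuffer[index].TextureCoordinate = new Vector2((float)x / (xCount - 1), (float)z / (zCount - 1));
                }
            }

            // Two triangles (6 indices) for each grid cell
            indexBuffer = new int[(xCount - 1) * (zCount - 1) * 6];
            int i = 0;

            for (int x = 0; x < xCount - 1; x++)
            {
                for (int z = 0; z < zCount - 1; z++)
                {
                    int baseIndex = x * zCount + z;

                    indexBuffer[i++] = baseIndex;
                    indexBuffer[i++] = baseIndex + 1;
                    indexBuffer[i++] = baseIndex + zCount;

                    indexBuffer[i++] = baseIndex + zCount;
                    indexBuffer[i++] = baseIndex + 1;
                    indexBuffer[i++] = baseIndex + zCount + 1;
                }
            }
        }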
Exemplo n.º 16
0
        private void ShowPositionsArray(Vector3[] positionsArray, float pixelSize, Color4 pixelColor, Bounds positionBounds)
        {
            BoundingBox positionsBoundingBox;

            // To correctly set the Camera's Near and Far distance, we need to provide the correct bounds of each shown 3D model.
            if (positionBounds != null && !positionBounds.IsEmpty)
            {
                // It is highly recommended to manually set the Bounds.
                positionsBoundingBox = positionBounds.BoundingBox;
            }
            else
            {
                // If the bounds are not set manually, we calculate the bounding box from all the positions
                positionsBoundingBox = BoundingBox.FromPoints(positionsArray);
            }


            // Create an OptimizedPointMesh that will optimize the rendering of the positions.
            // It uses two techniques to do that: culling of position segments that are not visible by the camera and reducing the number of rendered positions (see the comments below).

            _optimizedPointMesh = new OptimizedPointMesh <Vector3>(positionsArray,
                                                                   positionsBoundingBox,
                                                                   segmentsCount: 100); // All the positions are divided into 100 segments - when rendering, each segment is checked for visibility in the current camera (segments that are not visible are not rendered)

            // NOTE that you can also use an OptimizedPointMesh that takes a more complex vertex struct, for example PositionColor or PositionNormal. In that case use the other constructor.

            _optimizedPointMesh.OptimizationIndicesNumberTreshold = 100000; // We are satisfied with reducing the number of shown positions to 100000 (no need to optimize further; a higher number also reduces the initialization time)
            _optimizedPointMesh.MaxOptimizationViewsCount         = 10;     // Maximum number of created data sub-sets. The actual number can be lower when we hit the OptimizationIndicesNumberTreshold or when all vertices need to be shown.

            _optimizedPointMesh.Optimize(new SharpDX.Size2(MainDXViewportView.DXScene.Width, MainDXViewportView.DXScene.Height), pixelSize);

            _optimizedPointMesh.InitializeResources(MainDXViewportView.DXScene.DXDevice);



            // We will need to dispose the OptimizedPointMesh
            _modelDisposables.Add(_optimizedPointMesh);


            // Create a new PixelMaterial
            _pixelMaterial = new PixelMaterial()
            {
                PixelColor = pixelColor,
                PixelSize  = pixelSize,

                // By default the graphics card renders objects that are closer to the camera over the objects that are farther away from the camera.
                // This means that positions that are closer to the camera will be rendered over the positions that are farther away.
                // This may distort the shown colors.
                // Therefore, when using per-pixel colors it is better to disable depth buffer checking and render all the pixels.
                // This is done by setting ReadZBuffer and WriteZBuffer to false.
                ReadZBuffer  = false,
                WriteZBuffer = false
            };


            // It is also possible to set per-pixel colors (or per-pixel sizes by setting PixelSizes - not demonstrated here).
            // This comes with a performance drawback (see the comment below).
            //
            // To test per-pixel colors, uncomment the following code:

            //var pixelColors = new Color4[positionsArray.Length];
            //for (int i = 0; i < positionsArray.Length; i++)
            //    pixelColors[i] = new Color4((i % 2 == 0) ? 1 : 0, 0, (i % 2 != 0) ? 1 : 0, 1);

            //_pixelMaterial.PixelColors = pixelColors;
            //_pixelMaterial.PixelColor = Color4.White; // When the PixelColors array is used, PixelColor is used as a mask (each color in PixelColors is multiplied with PixelColor). To preserve the colors in PixelColors we need to set PixelColor to White.

            // By default the OptimizedPointMesh "combines" positions that are close together (closer than the size of one pixel on the screen)
            // and renders only some of them. In this case it is possible that only every second point (or every tenth point) is rendered
            // and this can remove the "color mixing" in our sample.
            // In such cases it is possible to disable this optimization by setting OptimizePositions to false:
            //_optimizedPointMesh.OptimizePositions = false;
            //
            // After this the OptimizedPointMesh will only provide the optimization that groups the positions into 100 segments
            // and then checks which segments are visible in the camera (by checking the segment bounding box).
            // But when the camera is positioned in such a way that all positions are visible,
            // then all positions will be sent to the graphics card - in that case OptimizePositions can still provide good results by skipping some pixels.
            //
            // But if the actual colors in your data do not have such sharp color differences (have more gradients),
            // then this problem should not be visible.


            _pixelMaterial.InitializeResources(MainDXViewportView.DXScene.DXDevice);

            _modelDisposables.Add(_pixelMaterial);


            // To render OptimizedPointMesh we need to use CustomRenderableNode that provides custom rendering callback action.
            _customRenderableNode      = new CustomRenderableNode(RenderAction, _optimizedPointMesh.Bounds, _optimizedPointMesh, _pixelMaterial);
            _customRenderableNode.Name = "CustomRenderableNode";

            _modelDisposables.Add(_customRenderableNode);

            var sceneNodeVisual3D = new SceneNodeVisual3D(_customRenderableNode);

            //sceneNodeVisual3D.Transform = transform;

            MainViewport.Children.Add(sceneNodeVisual3D);
        }
        private void ShowPositionsArray(Vector3[] positionsArray, float pixelSize, Color4 pixelColor, Bounds positionBounds)
        {
            if (_isUsingPixelsVisual3D)
            {
                // The easiest way to show many pixels is to use PixelsVisual3D.
                var pixelsVisual3D = new PixelsVisual3D()
                {
                    Positions  = positionsArray,
                    PixelColor = pixelColor.ToWpfColor(),
                    PixelSize  = pixelSize
                };

                // It is highly recommended to manually set the PositionsBounds.
                // If this is not done, the bounds are calculated by the DXEngine by checking all the positions.
                pixelsVisual3D.PositionsBounds = positionBounds;

                MainViewport.Children.Add(pixelsVisual3D);

                // !!! IMPORTANT !!!
                // When the PixelsVisual3D is no longer used, it needs to be disposed (we are using a DisposeList to dispose all created objects in the Unloaded event handler)
                _disposables.Add(pixelsVisual3D);

                return;
            }


            // The first step in showing the positions from the positionsArray as pixels is to create a SimpleMesh<Vector3>.
            // This will create a DirectX VertexBuffer that will be passed to the shaders.
            var simpleMesh = new SimpleMesh <Vector3>(vertexBufferArray: positionsArray,
                                                      indexBufferArray: null,
                                                      inputLayoutType: InputLayoutType.Position);

            simpleMesh.PrimitiveTopology = PrimitiveTopology.PointList; // We need to change the default PrimitiveTopology.TriangleList to PointList

            // To correctly set the Camera's Near and Far distance, we need to provide the correct bounds of each shown 3D model.

            if (positionBounds != null && !positionBounds.IsEmpty)
            {
                // It is highly recommended to manually set the Bounds.
                simpleMesh.Bounds = positionBounds;
            }
            else
            {
                // If we do not manually set the Bounds, then we need to call CalculateBounds to calculate the bounds
                simpleMesh.CalculateBounds();
            }


            // We will need to dispose the SimpleMesh
            _disposables.Add(simpleMesh);


            // Create a new PixelMaterial
            _pixelMaterial = new PixelMaterial()
            {
                PixelColor = pixelColor,
                PixelSize  = pixelSize
            };

            _pixelMaterial.InitializeResources(MainDXViewportView.DXScene.DXDevice);

            _disposables.Add(_pixelMaterial);


            // Now create a new MeshObjectNode
            _meshObjectNode = new Ab3d.DirectX.MeshObjectNode(simpleMesh, _pixelMaterial);

            _disposables.Add(_meshObjectNode);

            // To be able to add the MeshObjectNode (or any other SceneNode) to WPF's Viewport3D,
            // we need to create a SceneNodeVisual3D
            var sceneNodeVisual3D = new SceneNodeVisual3D(_meshObjectNode);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }
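        // A minimal sketch of how ShowPositionsArray could be called.
        // The GeneratePositions helper and the 100 x 100 x 100 extent are hypothetical and not part of the original sample;
        // the sketch only illustrates creating a positions array and passing manually calculated bounds.
        private void GeneratePositions(int positionsCount)
        {
            var rnd       = new Random();
            var positions = new Vector3[positionsCount];

            for (int i = 0; i < positionsCount; i++)
            {
                positions[i] = new Vector3((float)rnd.NextDouble() * 100f - 50f,
                                           (float)rnd.NextDouble() * 100f - 50f,
                                           (float)rnd.NextDouble() * 100f - 50f);
            }

            // Because we know the extent of the generated positions, we can set the bounds manually
            // instead of letting the engine calculate them from all the positions.
            var positionBounds = new Bounds(new BoundingBox(new Vector3(-50, -50, -50), new Vector3(50, 50, 50)));

            ShowPositionsArray(positions, pixelSize: 2.0f, pixelColor: Colors.Orange.ToColor4(), positionBounds: positionBounds);
        }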
        private void CreateTestModels()
        {
            _rootModelVisual3D = new ModelVisual3D();
            MainViewport3D.Children.Add(_rootModelVisual3D);



            // SphereVisual3D
            _sphereVisual3D = new Ab3d.Visuals.SphereVisual3D()
            {
                CenterPosition = new Point3D(-50, 0, -50),
                Radius         = 30,
                Material       = new DiffuseMaterial(Brushes.Silver)
            };

            _sphereVisual3D.SetName("SphereVisual3D");

            _rootModelVisual3D.Children.Add(_sphereVisual3D);


            var readerObj   = new ReaderObj();
            var teapotModel = readerObj.ReadModel3D(System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources\Models\teapot-hires.obj"));

            Ab3d.Utilities.ModelUtils.CenterAndScaleModel3D(teapotModel, centerPosition: new Point3D(50, 0, -50), finalSize: new Size3D(80, 80, 80), preserveAspectRatio: true);

            _teapotModelVisual3D = new ModelVisual3D()
            {
                Content = teapotModel
            };

            teapotModel.SetName("teapot Model3D");
            _teapotModelVisual3D.SetName("teapot ModelVisual3D");

            _rootModelVisual3D.Children.Add(_teapotModelVisual3D);


            // InstancedMeshGeometryVisual3D
            var boxMesh3D = new Ab3d.Meshes.BoxMesh3D(new Point3D(0, 0, 0), new Size3D(6, 6, 6), 1, 1, 1);

            InstanceData[] instancedData = DXEnginePerformance.InstancedMeshGeometry3DTest.CreateInstancesData(center: new Point3D(-50, 0, 50),
                                                                                                               size: new Size3D(80, 50, 80),
                                                                                                               modelScaleFactor: 1,
                                                                                                               xCount: 5,
                                                                                                               yCount: 1,
                                                                                                               zCount: 5,
                                                                                                               useTransparency: false);

            _instancedMeshGeometryVisual3D = new InstancedMeshGeometryVisual3D(boxMesh3D.Geometry);
            _instancedMeshGeometryVisual3D.InstancesData = instancedData;

            _instancedMeshGeometryVisual3D.SetName("InstancedMeshGeometryVisual3D");
            _rootModelVisual3D.Children.Add(_instancedMeshGeometryVisual3D);



            // MeshObjectNode and SceneNodeVisual3D
            var meshGeometry3D   = new Ab3d.Meshes.PyramidMesh3D(new Point3D(50, -20, 50), new Size3D(80, 40, 80)).Geometry;
            var dxMeshGeometry3D = new Ab3d.DirectX.Models.DXMeshGeometry3D(meshGeometry3D);

            var standardMaterial = new StandardMaterial()
            {
                DiffuseColor = Colors.Gold.ToColor3()
            };

            _pyramidMeshObjectNode = new Ab3d.DirectX.MeshObjectNode(dxMeshGeometry3D, standardMaterial);

            _disposables.Add(dxMeshGeometry3D);
            _disposables.Add(_pyramidMeshObjectNode);

            var sceneNodeVisual3D = new SceneNodeVisual3D(_pyramidMeshObjectNode);

            sceneNodeVisual3D.SetName("SceneNodeVisual3D");
            _rootModelVisual3D.Children.Add(sceneNodeVisual3D);
        }
Exemplo n.º 19
0
        private void CreateSimpleDemoScene()
        {
            // When using Ab3d.PowerToys and Ab3d.DXEngine, the standard way to render text is to use TextBlockVisual3D.
            // It provides many options and easy positioning with the Position / PositionType properties. See the sample in the Ab3d.PowerToys samples project for more info.
            //
            // Here we also show how to set up the alpha-clip threshold that can be used when rendering with Ab3d.DXEngine.
            // This way we do not need to sort the TextBlockVisual3D objects by their camera distance to render them correctly.

            var textBlockVisual3D = new TextBlockVisual3D()
            {
                Text                   = "TextBlockVisual3D",
                Position               = new Point3D(-190, -50, 0),
                PositionType           = PositionTypes.BottomLeft,
                TextDirection          = new Vector3D(1, 0, 0),
                UpDirection            = new Vector3D(0, 1, 0),
                Size                   = new Size(80, 40),
                Background             = Brushes.Transparent,
                RenderBitmapSize       = new Size(256, 128),
                TextPadding            = new Thickness(5, 0, 5, 0),
                BorderBrush            = Brushes.Yellow,
                BorderThickness        = new Thickness(0, 2, 0, 2),
                IsBackSidedTextFlipped = true
            };

            // Because a TextBlockVisual3D usually uses a transparent background, the rules and limitations of rendering transparent objects apply when using multiple TextBlockVisual3D objects (see the Alpha clipping sample for more info).
            // With Ab3d.DXEngine, very fast sorting of transparent objects can be enabled by setting:
            // MainDXViewportView.DXScene.IsTransparencySortingEnabled = true;
            //
            // But instead of using transparency sorting, we enable alpha-clipping here (see the Alpha clipping sample for more info):
            textBlockVisual3D.SetDXAttribute(DXAttributeType.Texture_AlphaClipThreshold, 0.15f);

            MainViewport.Children.Add(textBlockVisual3D);


            // It is also possible to show 3D text by using TextVisual3D.
            // That object renders text with 3D lines.
            // Its disadvantage is that it supports only an old-style font that was used for plotters and cannot render all characters.


            // TextBlockVisual3D renders the text by first rendering the text with the specified border to a texture.
            // This texture is then rendered onto a 3D plane object.
            // This works very well when there are not a lot of texts.
            // But if many texts need to be rendered, then it takes a long time to render all the texts to textures, and the textures also take a lot of memory.
            //
            // In this case the InstancedTextNode can be used, because it renders only the individual characters and then reuses
            // the textures with the rendered characters to show the texts. To make rendering even more efficient, object instancing is used.
            //
            //
            // The first step in using the InstancedTextNode is to create its instance.
            //
            // Here we define the FontFamily and FontWeight that will be the same for all added texts.
            //
            // We also define the size of the texture that will be used to render the characters.
            // It is recommended to use a size that is a power of 2 - for example 64, 128, 256, etc.
            // By default the fontBitmapSize is set to 128, which renders each character to a 128 x 128 texture.
            //
            // We can also set useDynamicInstanceBuffer to true. This would create a dynamic instance buffer.
            // This is recommended when the text data changes very often (color, position or visibility changes in each frame or similar).
            // Here we change the data only occasionally, so we keep useDynamicInstanceBuffer at its default value (this is better for GPU access to the buffer).
            _instancedTextNode = new InstancedTextNode(fontFamily: new FontFamily("Arial"),
                                                       fontWeight: FontWeights.Normal,
                                                       fontBitmapSize: 128,
                                                       useDynamicInstanceBuffer: false);


            // Then we can call AddText to add individual texts to the InstancedTextNode.
            // Note that to be able to show the text from the back side, we need to set hasBackSide to true (this renders twice as many objects).
            // The AddText method returns an InstancedText object that can be used to change the color or position of the text and to show or hide it.
            _instancedText = _instancedTextNode.AddText("Ab3d.DXEngine", Colors.Orange, new Point3D(-190, 0, 0), size: 25, hasBackSide: true);

            // To change direction and orientation of text, we can call the SetTextDirection method.
            _instancedTextNode.SetTextDirection(textDirection: new Vector3D(1, 0, 0), upDirection: new Vector3D(0, 0, -1));

            // All characters from font are supported. Also new line is supported.
            _instancedTextNode.AddText("All chars:\n@üßščžç☯", Colors.Black, new Point3D(-100, 1, 50), size: 30, hasBackSide: true);


            _instancedTextNode.SetTextDirection(textDirection: new Vector3D(1, 0, 0), upDirection: new Vector3D(0, 1, 0));
            _instancedTextNode.AddText("Right->", Colors.Red, new Point3D(0, 30, 0), size: 30, hasBackSide: true);

            _instancedTextNode.SetTextDirection(textDirection: new Vector3D(0, 0, -1), upDirection: new Vector3D(0, 1, 0));
            _instancedTextNode.AddText("Forward->", Colors.Blue, new Point3D(0, 30, 0), size: 30, hasBackSide: true);

            _instancedTextNode.SetTextDirection(textDirection: new Vector3D(0, 1, 0), upDirection: new Vector3D(-1, 0, 0));
            _instancedTextNode.AddText("UP->", Colors.Green, new Point3D(0, 70, 0), size: 30, hasBackSide: true);


            // Sample on how to create text that is flipped on the back side (so it can be correctly read from the back side)
            var startPosition = new Point3D(10, 0, 0);

            _instancedTextNode.SetTextDirection(textDirection: new Vector3D(1, 0, 0), upDirection: new Vector3D(0, 1, 0));
            var flippedFrontFaceInstancedText = _instancedTextNode.AddText("FlippedBackSideText", Colors.Green, startPosition, size: 25, hasBackSide: false);

            // Based on size of previous text (instancedText.TextBounds), we can calculate the start position for the flipped back face text:
            var backFaceStartPosition = new Point3D(startPosition.X + flippedFrontFaceInstancedText.TextBounds.SizeX, startPosition.Y, startPosition.Z);

            _instancedTextNode.SetTextDirection(textDirection: new Vector3D(-1, 0, 0), upDirection: new Vector3D(0, 1, 0)); // flip textDirection, preserve upDirection
            var flippedBackFaceInstancedText = _instancedTextNode.AddText(flippedFrontFaceInstancedText.Text, flippedFrontFaceInstancedText.Color, backFaceStartPosition, size: 25, hasBackSide: false);


            // It is also possible to call an AddText overload that takes a transformation matrix instead of a position and scale.
            // This method does not use the current text direction that is set with SetTextDirection:

            var transform3DGroup = new Transform3DGroup();

            transform3DGroup.Children.Add(new ScaleTransform3D(20, 70, 20)); // Note that initially the font size is 1, so we need to scale it !!!
            transform3DGroup.Children.Add(new TranslateTransform3D(-120, -120, 0));

            _instancedTextNode.AddText("Custom transform", Colors.Gray, transform3DGroup.Value, hasBackSide: true);


            // If we want to immediately create character textures, we can call the InitializeResources method.
            // Note that MainDXViewportView.DXScene must not be null (this can be called in MainDXViewportView.DXSceneDeviceCreated or MainDXViewportView.DXSceneInitialized event handler)
            //_instancedTextNode.InitializeResources(MainDXViewportView.DXScene);


            // Show the InstancedTextNode as any other DXEngine's SceneNode:
            var sceneNodeVisual1 = new SceneNodeVisual3D(_instancedTextNode);

            MainViewport.Children.Add(sceneNodeVisual1);


            // To show text with another font or another font weight, we need to create another InstancedTextNode
            _instancedTextNode2 = new InstancedTextNode(new FontFamily("Times New Roman"), FontWeights.Bold, fontBitmapSize: 128);
            _instancedTextNode2.AddText("Text with any font", Colors.Gold, new Point3D(100, -100, 0), 30, true);

            var sceneNodeVisual2 = new SceneNodeVisual3D(_instancedTextNode2);

            MainViewport.Children.Add(sceneNodeVisual2);


            SetupDemoSceneButtons(isDemoSceneShown: true);

            UpdateCharactersCountInfo();
        }
Exemplo n.º 20
0
        private void AddPointCloudNode(Transform3D transform)
        {
            int positionsCount = _vertexBuffer.Length;
            var positions      = new Vector3[positionsCount];

            for (int i = 0; i < positionsCount; i++)
            {
                positions[i] = _vertexBuffer[i].Position;
            }


            var boundingBox = BoundingBox.FromPoints(positions);


            var optimizedPointMesh = new OptimizedPointMesh <PositionNormal>(_vertexBuffer,
                                                                             positions,
                                                                             InputLayoutType.Position | InputLayoutType.Normal,
                                                                             boundingBox,
                                                                             segmentsCount: SegmentsCount,
                                                                             name: "ShaderOptimizedPointMesh");


            float selectedPointSize = (float)PointSizeComboBox.SelectedItem;

            if (!MainDXViewportView.DXScene.BuffersInitialized)
            {
                throw new Exception("Cannot create OptimizedPointMesh without know DXScene Size");
            }

            // Use the size from DXScene, because this also takes DPI settings into account and gives us the most accurate number of available pixels (better than DXViewportView.ActualWidth / Height)
            optimizedPointMesh.Optimize(new SharpDX.Size2(MainDXViewportView.DXScene.Width, MainDXViewportView.DXScene.Height), selectedPointSize);

            optimizedPointMesh.InitializeResources(MainDXViewportView.DXScene.DXDevice);


            _pointCloudDisposables.Add(optimizedPointMesh); // optimizedPointMesh is added to _pointCloudDisposables (and not to _disposables) because it needs to be disposed separately - after the number of positions is changed in the DropDown


            if (_optimizedPointMeshes == null)
            {
                _optimizedPointMeshes = new List <OptimizedPointMesh <PositionNormal> >();
            }

            _optimizedPointMeshes.Add(optimizedPointMesh);


            _shadedPointCloudEffect.DiffuseColor = Colors.Orange.ToColor4();


            var customRenderableNode = new CustomRenderableNode(RenderAction, new Bounds(boundingBox), optimizedPointMesh, _effectMaterial);

            customRenderableNode.Name = "CustomRenderableNode";

            _pointCloudDisposables.Add(customRenderableNode);

            var sceneNodeVisual3D = new SceneNodeVisual3D(customRenderableNode);

            sceneNodeVisual3D.Transform = transform;

            PointCloudRootVisual3D.Children.Add(sceneNodeVisual3D);
        }
        public ColoredLinesSample()
        {
            InitializeComponent();

            var disposables = new DisposeList();

            // Both poly-line and multi-line use the same positions and positionColors
            var positions = new Vector3[]
            {
                new Vector3(-225, 50, -50),
                new Vector3(-150, 100, -50),
                new Vector3(-75, 50, -50),
                new Vector3(0, 100, -50),
                new Vector3(75, 50, -50),
                new Vector3(150, 100, -50),
            };

            var positionColors = new Color4[]
            {
                Colors.Blue.ToColor4(),
                Colors.Green.ToColor4(),
                Colors.Yellow.ToColor4(),
                Colors.Orange.ToColor4(),
                Colors.Red.ToColor4(),
                Colors.Transparent.ToColor4()
            };


            // Create a colored poly-line:
            var screenSpaceLineNode1 = CreateColoredLineNode(positions, positionColors, 10, disposables, isPolyLine: true);

            var sceneNodeVisual1 = new SceneNodeVisual3D(screenSpaceLineNode1);

            MainViewport.Children.Add(sceneNodeVisual1);


            // Create a colored multi-line:
            var screenSpaceLineNode2 = CreateColoredLineNode(positions, positionColors, 10, disposables, isPolyLine: false);

            var sceneNodeVisual2 = new SceneNodeVisual3D(screenSpaceLineNode2);

            sceneNodeVisual2.Transform = new TranslateTransform3D(0, 75, 0);
            MainViewport.Children.Add(sceneNodeVisual2);


            // Create a single line:
            // Note that if you want to render many colored lines,
            // it is better to use one multi-line that can render many lines with one draw call
            // than to create many individual lines with the CreateColoredLineNode overload that takes a startPosition and endPosition.
            var screenSpaceLineNode3 = CreateColoredLineNode(startPosition:  new Vector3(-225, 0, -50),
                                                             endPosition:    new Vector3(150, 0, -50),
                                                             startLineColor: Colors.Blue.ToColor4(),
                                                             endLineColor:   Colors.Transparent.ToColor4(),
                                                             lineThickness:  10,
                                                             disposables:    disposables);

            var sceneNodeVisual3 = new SceneNodeVisual3D(screenSpaceLineNode3);

            sceneNodeVisual3.Transform = new TranslateTransform3D(0, 0, 0);
            MainViewport.Children.Add(sceneNodeVisual3);



            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                disposables.Dispose();
                MainDXViewportView.Dispose();
            };
        }
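        // CreateColoredLineNode is not shown in this example. The following is only a minimal sketch of the
        // overload that takes a positions array. It assumes that the engine provides a PositionColoredLineMaterial
        // with LineColor, LineThickness and PositionColors properties (these names are assumptions based on the
        // per-position color pattern used elsewhere in this sample - check the actual library API before using this).
        private ScreenSpaceLineNode CreateColoredLineNode(Vector3[] positions, Color4[] positionColors, float lineThickness, DisposeList disposables, bool isPolyLine)
        {
            var lineMaterial = new Ab3d.DirectX.Materials.PositionColoredLineMaterial()
            {
                LineColor      = Color4.White,   // used as a mask - multiplied with each color in PositionColors
                LineThickness  = lineThickness,
                PositionColors = positionColors
            };

            disposables.Add(lineMaterial);

            // isLineStrip: true connects the positions into one poly-line; false renders individual line segments
            var screenSpaceLineNode = new ScreenSpaceLineNode(positions, isLineStrip: isPolyLine, isLineClosed: false, lineMaterial: lineMaterial);

            disposables.Add(screenSpaceLineNode);

            return screenSpaceLineNode;
        }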
        private void MainDXViewportViewOnDXSceneDeviceCreated(object sender, EventArgs e)
        {
            var instancedData = CreateInstancesData(center: new Point3D(0, 0, 0),
                                                    size: new Size3D(4 * XInstancesCount, 4 * YInstancesCount, 4 * ZInstancesCount),
                                                    modelScaleFactor: 1,
                                                    xCount: XInstancesCount, yCount: YInstancesCount, zCount: ZInstancesCount,
                                                    useTransparency: false);

            // Update colors
            int dataCount = instancedData.Length;

            for (int i = 0; i < dataCount; i++)
            {
                float percentage = 1.0f - (float)i / (float)dataCount;
                instancedData[i].DiffuseColor = new Color4(red: percentage, green: 1, blue: percentage, alpha: 1);
            }


            var boxMeshGeometry = new Ab3d.Meshes.BoxMesh3D(centerPosition: new Point3D(0, 0, 0), size: new Size3D(3, 3, 3), xSegments: 1, ySegments: 1, zSegments: 1).Geometry;


            // The first InstancedMeshGeometry3DNode will get the instancedData and
            // will also create the DirectX instance buffer.
            _instancedMeshGeometry3DNode1 = new InstancedMeshGeometry3DNode(boxMeshGeometry);
            _instancedMeshGeometry3DNode1.SetInstanceData(instancedData);

            // Manually call InitializeResources.
            // For this to work, the dxViewportView.DXScene must be set.
            // This is the reason why this method is called inside a DXViewportView.DXSceneDeviceCreated event handler.
            _instancedMeshGeometry3DNode1.InitializeResources(MainDXViewportView.DXScene);

            _disposables.Add(_instancedMeshGeometry3DNode1);


            var instanceBuffer = _instancedMeshGeometry3DNode1.GetInstanceBuffer();

            if (instanceBuffer == null)
            {
                throw new Exception("GetInstanceBuffer returned null"); // Probably DXScene is not initialized
            }
            // Now create another 2 InstancedMeshGeometry3DNode objects
            // and initialize them with the already created instanceBuffer

            // The next InstancedMeshGeometry3DNode will also be initialized so
            // that all its instances are rendered with a red color instead of the color defined in the instance data.
            _instancedMeshGeometry3DNode2 = new InstancedMeshGeometry3DNode(boxMeshGeometry);
            _instancedMeshGeometry3DNode2.SetInstanceBuffer(instanceBuffer, InstanceData.SizeInBytes, instancedData.Length, instancedData);
            _instancedMeshGeometry3DNode2.UseSingleObjectColor(Colors.Red.ToColor4());
            _disposables.Add(_instancedMeshGeometry3DNode2);

            // The last InstancedMeshGeometry3DNode will render the last part of the instances with the color defined in the instance data.
            _instancedMeshGeometry3DNode3 = new InstancedMeshGeometry3DNode(boxMeshGeometry);
            _instancedMeshGeometry3DNode3.SetInstanceBuffer(instanceBuffer, InstanceData.SizeInBytes, instancedData.Length, instancedData);
            _disposables.Add(_instancedMeshGeometry3DNode3);


            // Set StartInstanceIndex and InstancesCount
            _startTime         = DateTime.Now;
            _lastStartRowIndex = int.MinValue;
            UpdateHiddenInstancesPositions();


            var rootSceneNode = new SceneNode();

            rootSceneNode.AddChild(_instancedMeshGeometry3DNode1);
            rootSceneNode.AddChild(_instancedMeshGeometry3DNode2);
            rootSceneNode.AddChild(_instancedMeshGeometry3DNode3);

            var sceneNodeVisual3D = new SceneNodeVisual3D(rootSceneNode);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }
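        // UpdateHiddenInstancesPositions is not shown here. The following is only a minimal sketch of how the
        // three InstancedMeshGeometry3DNode objects could be assigned different parts of the shared instance buffer
        // by setting their StartInstanceIndex and InstancesCount properties (the property names come from the comment above).
        // The animated "hidden" row is an illustration and not necessarily the behavior of the original sample.
        private void UpdateHiddenInstancesPositions()
        {
            int totalInstancesCount = XInstancesCount * YInstancesCount * ZInstancesCount;
            int instancesPerRow     = XInstancesCount * ZInstancesCount;

            // Animate which row of instances is currently rendered by the second (red) node
            int currentRowIndex = (int)((DateTime.Now - _startTime).TotalSeconds) % YInstancesCount;

            if (currentRowIndex == _lastStartRowIndex)
                return; // nothing changed since the last call

            _lastStartRowIndex = currentRowIndex;

            int rowStartIndex = currentRowIndex * instancesPerRow;
            int rowEndIndex   = rowStartIndex + instancesPerRow;

            // First node renders the instances before the selected row (with the colors from the instance data)
            _instancedMeshGeometry3DNode1.StartInstanceIndex = 0;
            _instancedMeshGeometry3DNode1.InstancesCount     = rowStartIndex;

            // Second node renders the selected row with a red color (see UseSingleObjectColor above)
            _instancedMeshGeometry3DNode2.StartInstanceIndex = rowStartIndex;
            _instancedMeshGeometry3DNode2.InstancesCount     = instancesPerRow;

            // Third node renders the remaining instances
            _instancedMeshGeometry3DNode3.StartInstanceIndex = rowEndIndex;
            _instancedMeshGeometry3DNode3.InstancesCount     = totalInstancesCount - rowEndIndex;
        }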
Exemplo n.º 23
0
        // This method uses low level DXEngine objects to create tube paths.
        private void AddSpirals_MeshObjectNode(int xCount, int yCount, int spiralLength, bool useMultiThreading)
        {
            float circleRadius  = 10;
            int   spiralCircles = spiralLength / 20; // One circle in the spiral is created from 20 lines

            var dxMaterial = new Ab3d.DirectX.Materials.StandardMaterial()
            {
                DiffuseColor  = Color3.Black,
                EmissiveColor = Color3.White,
                Effect        = _solidColorEffect
            };

            _disposables.Add(dxMaterial);


            float xStart = -xCount * circleRadius * 1.5f;
            float yStart = -yCount * circleRadius * 1.5f;


            if (useMultiThreading)
            {
                // On an i7 6700 (4 cores with hyper-threading) the multi-threaded code path is almost 100% faster than the single-threaded solution.
                var initializedMeshes = new MeshBase[xCount, yCount];

                var dxDevice = MainDXViewportView.DXScene.DXDevice;

                Parallel.For(0, xCount * yCount, xy =>
                {
                    int x = (int)xy / yCount;
                    int y = (int)xy % yCount;


                    var spiralPositions = CreateSpiralPositions(startPosition: new Vector3(x * circleRadius * 3 + xStart, y * circleRadius * 3 + yStart, 0),
                                                                circleXDirection: new Vector3(1, 0, 0),
                                                                circleYDirection: new Vector3(0, 1, 0),
                                                                oneSpiralCircleDirection: new Vector3(0, 0, -10),
                                                                circleRadius: circleRadius,
                                                                segmentsPerCircle: 20,
                                                                circles: spiralCircles);

                    MeshBase tubePathMesh = CreateTubePathMesh(spiralPositions, radius: 1.0f, segmentsCount: 8, isTubeClosed: true, tubeColor: Color4.White);

                    // Create DirectX resources in the background thread (this creates buffers on the GPU and sends the data there from main memory)
                    tubePathMesh.InitializeResources(dxDevice);

                    // Save the mesh
                    initializedMeshes[x, y] = tubePathMesh;
                });

                // Now most of the work has been done in a multi-threaded way.
                // We only need to create the MeshObjectNode objects and add them to the scene. This needs to be done on the UI thread.
                MainViewport.BeginInit();
                MainViewport.Children.Clear();

                for (int x = 0; x < xCount; x++)
                {
                    for (int y = 0; y < yCount; y++)
                    {
                        var tubePathMesh   = initializedMeshes[x, y];
                        var meshObjectNode = new Ab3d.DirectX.MeshObjectNode(tubePathMesh, dxMaterial);

                        var tubePathVisual3D = new SceneNodeVisual3D(meshObjectNode);

                        // IMPORTANT:
                        //
                        // In this performance demo we create new spiral positions and a new tubePathMesh for each spiral.
                        // But because the spirals are the same, we could create only one set of spiral positions and one tubePathMesh
                        // and then use that tubePathMesh to create multiple MeshObjectNode and SceneNodeVisual3D objects,
                        // each with its own Transform property set - as shown in the line below.
                        //
                        // Sharing one mesh would provide much better performance and lower memory usage,
                        // but for this demo we want to simulate the creation of huge tube paths in the background thread.
                        //
                        //tubePathVisual3D.Transform = new TranslateTransform3D(x * circleRadius * 3 + xStart, y * circleRadius * 3 + yStart, 0);


                        _disposables.Add(tubePathMesh); // We did not add this in the background thread (we would need locking for that), so we do it now
                        _disposables.Add(meshObjectNode);

                        MainViewport.Children.Add(tubePathVisual3D);
                    }
                }

                MainViewport.EndInit();
            }

            else
            {
                // No multi-threading
                MainViewport.BeginInit();
                MainViewport.Children.Clear();

                for (int x = 0; x < xCount; x++)
                {
                    for (int y = 0; y < yCount; y++)
                    {
                        var spiralPositions2 = CreateSpiralPositions(startPosition: new Point3D(x * circleRadius * 3 + xStart, y * circleRadius * 3 + yStart, 0),
                                                                     circleXDirection: new Vector3D(1, 0, 0),
                                                                     circleYDirection: new Vector3D(0, 1, 0),
                                                                     oneSpiralCircleDirection: new Vector3D(0, 0, -10),
                                                                     circleRadius: circleRadius,
                                                                     segmentsPerCircle: 20,
                                                                     circles: spiralCircles);

                        var spiralPositions = spiralPositions2.Select(p => p.ToVector3()).ToArray();


                        //var spiralPositions = CreateSpiralPositions(startPosition: new Vector3(x * circleRadius * 3 + xStart, y * circleRadius * 3 + yStart, 0),
                        //                                            circleXDirection: new Vector3(1, 0, 0),
                        //                                            circleYDirection: new Vector3(0, 1, 0),
                        //                                            oneSpiralCircleDirection: new Vector3(0, 0, -10),
                        //                                            circleRadius: circleRadius,
                        //                                            segmentsPerCircle: 20,
                        //                                            circles: spiralCircles);

                        var tubePathMesh = CreateTubePathMesh(spiralPositions, radius: 2, segmentsCount: 8, isTubeClosed: true, tubeColor: Color4.White);

                        var meshObjectNode = new Ab3d.DirectX.MeshObjectNode(tubePathMesh, dxMaterial);

                        var tubePathVisual3D = new SceneNodeVisual3D(meshObjectNode);
                        //tubePathVisual3D.Transform = new TranslateTransform3D(x * circleRadius * 3 + xStart, y * circleRadius * 3 + yStart, 0);

                        _disposables.Add(meshObjectNode);

                        MainViewport.Children.Add(tubePathVisual3D);
                    }
                }

                MainViewport.EndInit();
            }
        }
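        // CreateSpiralPositions is not shown in this example. The following is only a minimal sketch of the
        // Vector3 overload used in the multi-threaded code path above: it walks around a circle defined by
        // circleXDirection and circleYDirection and advances by oneSpiralCircleDirection on every completed circle.
        private Vector3[] CreateSpiralPositions(Vector3 startPosition,
                                                Vector3 circleXDirection,
                                                Vector3 circleYDirection,
                                                Vector3 oneSpiralCircleDirection,
                                                float circleRadius,
                                                int segmentsPerCircle,
                                                int circles)
        {
            var positions = new Vector3[segmentsPerCircle * circles + 1];

            for (int i = 0; i < positions.Length; i++)
            {
                float angle = (float)(2 * Math.PI * i / segmentsPerCircle);

                // Position on the circle ...
                Vector3 circleOffset = circleXDirection * (float)Math.Cos(angle) * circleRadius +
                                       circleYDirection * (float)Math.Sin(angle) * circleRadius;

                // ... advanced along the spiral axis by the fraction of completed circles
                Vector3 spiralOffset = oneSpiralCircleDirection * ((float)i / segmentsPerCircle);

                positions[i] = startPosition + circleOffset + spiralOffset;
            }

            return positions;
        }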
Exemplo n.º 24
0
        private void InitializePointCloud(Vector3[] positions, BoundingBox positionsBounds, Color4[] positionColors, bool useOptimizedPointMesh, bool disableDepthRead, bool disableDepthWrite)
        {
            if (MainDXViewportView.DXScene == null)
            {
                return; // If this happens, then this method is called too soon (before DXEngine is initialized) or we are using WPF 3D
            }
            // First, set up the material:

            // Create a new PixelMaterial
            _pixelMaterial = new PixelMaterial()
            {
                PixelColor  = Color4.White, // When using PixelColors, PixelColor is used as a mask (multiplied with each color)
                PixelSize   = 2,
                PixelColors = positionColors,

                // By default the graphics card renders objects that are closer to the camera over the objects that are farther away from the camera.
                // This means that positions that are closer to the camera will be rendered over the positions that are farther away.
                // This may distort the shown colors.
                // Therefore, when using per-pixel colors it is better to disable depth buffer checking and render all the pixels.
                // This is done by setting ReadZBuffer and WriteZBuffer to false.
                ReadZBuffer  = !disableDepthRead,
                WriteZBuffer = !disableDepthWrite
            };

            _pixelMaterial.InitializeResources(MainDXViewportView.DXScene.DXDevice);

            _disposables.Add(_pixelMaterial);


            // Now set up the mesh and create SceneNode to show it

            if (useOptimizedPointMesh)
            {
                _optimizedPointMesh = new OptimizedPointMesh <Vector3>(positions,
                                                                       positionsBounds,
                                                                       segmentsCount: 100);

                // NOTE that you can also use an OptimizedPointMesh that takes a more complex vertex struct, for example PositionColor or PositionNormal. In that case use the other constructor.

                _optimizedPointMesh.OptimizationIndicesNumberThreshold = 100000; // We are satisfied with reducing the number of shown positions to 100000 (no need to optimize further; a higher number also reduces the initialization time)
                _optimizedPointMesh.MaxOptimizationViewsCount          = 10;     // Maximum number of created data sub-sets. The actual number can be lower when we hit the OptimizationIndicesNumberThreshold or when all vertices need to be shown.

                _optimizedPointMesh.Optimize(new SharpDX.Size2(MainDXViewportView.DXScene.Width, MainDXViewportView.DXScene.Height), standardPointSize: 1);

                _optimizedPointMesh.InitializeResources(MainDXViewportView.DXScene.DXDevice);

                _disposables.Add(_optimizedPointMesh);


                // To render OptimizedPointMesh we need to use CustomRenderableNode that provides custom rendering callback action.
                var customRenderableNode = new CustomRenderableNode(RenderAction, _optimizedPointMesh.Bounds, _optimizedPointMesh, _pixelMaterial);
                customRenderableNode.Name = "CustomRenderableNode";
                //customRenderableNode.CustomRenderingQueue = MainDXViewportView.DXScene.BackgroundRenderingQueue;

                _disposables.Add(customRenderableNode);

                var sceneNodeVisual3D = new SceneNodeVisual3D(customRenderableNode);
                //sceneNodeVisual3D.Transform = transform;

                MainViewport.Children.Add(sceneNodeVisual3D);
            }
            else
            {
                // Use SimpleMesh - all positions will always be rendered:

                var simpleMesh = new SimpleMesh <Vector3>(vertexBufferArray: positions,
                                                          indexBufferArray: null,
                                                          inputLayoutType: InputLayoutType.Position);

                simpleMesh.PrimitiveTopology = PrimitiveTopology.PointList; // We need to change the default PrimitiveTopology.TriangleList to PointList

                // To correctly set the Camera's Near and Far distance, we need to provide the correct bounds of each shown 3D model.

                // It is highly recommended to manually set the Bounds.
                simpleMesh.Bounds = new Bounds(positionsBounds);

                // if we do not manually set the Bounds, then we need to call CalculateBounds to calculate the bounds
                //simpleMesh.CalculateBounds();

                // We will need to dispose the SimpleMesh
                _disposables.Add(simpleMesh);


                // Now create a new MeshObjectNode
                _meshObjectNode = new Ab3d.DirectX.MeshObjectNode(simpleMesh, _pixelMaterial);

                _disposables.Add(_meshObjectNode);

                // To be able to add the MeshObjectNode (or any other SceneNode) to WPF's Viewport3D,
                // we need to create a SceneNodeVisual3D
                var sceneNodeVisual3D = new SceneNodeVisual3D(_meshObjectNode);

                MainViewport.Children.Add(sceneNodeVisual3D);
            }


            Camera1.TargetPosition = positionsBounds.Center.ToWpfPoint3D();
            Camera1.Distance       = positionsBounds.ToRect3D().GetDiagonalLength();
        }
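        // A minimal sketch of how the positionColors array passed to InitializePointCloud could be created.
        // This CreateHeightColors helper is hypothetical (not part of the original sample); it maps the Y value
        // of each position to a simple blue-to-red gradient.
        private Color4[] CreateHeightColors(Vector3[] positions, BoundingBox positionsBounds)
        {
            float minY   = positionsBounds.Minimum.Y;
            float height = positionsBounds.Maximum.Y - minY;

            if (height <= 0)
                height = 1; // prevent division by zero for flat point clouds

            var positionColors = new Color4[positions.Length];

            for (int i = 0; i < positions.Length; i++)
            {
                float t = (positions[i].Y - minY) / height; // 0 at the bottom, 1 at the top
                positionColors[i] = new Color4(red: t, green: 0, blue: 1 - t, alpha: 1);
            }

            return positionColors;
        }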
Exemplo n.º 25
0
        private void AddSimpleMesh()
        {
            // To show _meshGeometry3D using low-level DXEngine objects we will do the following:
            // 1) Create an array of PositionNormalTexture data - this will represent a managed vertex buffer array.
            // 2) Create a SimpleMesh<PositionNormalTexture> object that will create an unmanaged vertex buffer from the managed vertex buffer.
            // 3) Create a MeshObjectNode (derived from SceneNode) from the SimpleMesh.
            // 4) Create a SceneNodeVisual3D that will allow us to add the MeshObjectNode to the Viewport3D children.


            // 1) Create an array of PositionNormalTexture data - this will represent a managed vertex buffer array.

            int positionsCount = _meshGeometry3D.Positions.Count;

            _vertexBufferArray = new PositionNormalTexture[positionsCount];
            FillVertexBuffer(_vertexBufferArray, _meshGeometry3D.Positions, _meshGeometry3D.Normals, _meshGeometry3D.TextureCoordinates);

            var indexBuffer = new int[_meshGeometry3D.TriangleIndices.Count];

            _meshGeometry3D.TriangleIndices.CopyTo(indexBuffer, 0);


            // 2) Create a SimpleMesh<PositionNormalTexture> object that will create an unmanaged vertex buffer from the managed vertex buffer.

            bool createDynamicVertexBuffer = UseDynamicBufferCheckBox.IsChecked ?? false;

            _simpleMesh = new SimpleMesh <PositionNormalTexture>(_vertexBufferArray,
                                                                 indexBuffer,
                                                                 inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate,
                                                                 name: "SimpleMesh-from-PositionNormalTexture-array",
                                                                 createDynamicVertexBuffer: createDynamicVertexBuffer);

            // We can also manually specify the bounds of the mesh.
            // If this is not done, then the SimpleMesh will go through all the positions and calculate the bounds.
            // But because the bounds are already calculated by the MeshGeometry3D, we can just use that value (we only need to convert it to DXEngine's bounds).
            _simpleMesh.Bounds = _meshGeometry3D.Bounds.ToDXEngineBounds();

            _originalMeshSizeY = _simpleMesh.Bounds.BoundingBox.Maximum.Y - _simpleMesh.Bounds.BoundingBox.Minimum.Y;

            _disposables.Add(_simpleMesh);


            var diffuseMaterial = new DiffuseMaterial(Brushes.Silver);
            var dxMaterial      = new Ab3d.DirectX.Materials.WpfMaterial(diffuseMaterial);

            _disposables.Add(dxMaterial);


            // 3) Create a MeshObjectNode (derived from SceneNode) from the SimpleMesh.

            _meshObjectNode      = new Ab3d.DirectX.MeshObjectNode(_simpleMesh, dxMaterial);
            _meshObjectNode.Name = "MeshObjectNode-from-SimpleMesh";

            _disposables.Add(_meshObjectNode);


            // 4) Create a SceneNodeVisual3D that will allow us to add the MeshObjectNode to the Viewport3D children.

            var sceneNodeVisual3D = new SceneNodeVisual3D(_meshObjectNode);


            // Scale and translate the sceneNodeVisual3D and then add it to the scene
            AddVisual3D(sceneNodeVisual3D);
        }
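        // FillVertexBuffer is not shown in this example. The following is only a minimal sketch that copies
        // the WPF MeshGeometry3D data into the PositionNormalTexture array. It assumes that PositionNormalTexture
        // exposes Position, Normal and TextureCoordinate members and that Normals and TextureCoordinates are fully populated.
        private void FillVertexBuffer(PositionNormalTexture[] vertexBuffer,
                                      Point3DCollection positions,
                                      Vector3DCollection normals,
                                      PointCollection textureCoordinates)
        {
            for (int i = 0; i < positions.Count; i++)
            {
                var position = positions[i];
                var normal   = normals[i];
                var uv       = textureCoordinates[i];

                vertexBuffer[i].Position          = new Vector3((float)position.X, (float)position.Y, (float)position.Z);
                vertexBuffer[i].Normal            = new Vector3((float)normal.X, (float)normal.Y, (float)normal.Z);
                vertexBuffer[i].TextureCoordinate = new Vector2((float)uv.X, (float)uv.Y);
            }
        }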
        private void CreateScene()
        {
            var boxMeshGeometry3D = new Ab3d.Meshes.BoxMesh3D(new Point3D(0, 0, 0), new Size3D(BoxSize, BoxSize, BoxSize), 1, 1, 1).Geometry;

            _oneMeshTriangleIndicesCount = boxMeshGeometry3D.TriangleIndices.Count;


            PositionNormalTexture[] vertexBuffer;
            int[] indexBuffer;

            CreateMultiMeshBuffer(center: new Vector3(0, 0, 0),
                                  size: new Vector3(XCount * (BoxSize + BoxesMargin), YCount * (BoxSize + BoxesMargin), ZCount * (BoxSize + BoxesMargin)),
                                  xCount: XCount, yCount: YCount, zCount: ZCount,
                                  meshGeometry3D: boxMeshGeometry3D,
                                  vertexBuffer: out vertexBuffer,
                                  indexBuffer: out indexBuffer);

            _multiMaterialMesh = new SimpleMesh <PositionNormalTexture>(vertexBuffer, indexBuffer,
                                                                        inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate);


            _indexBufferLength = indexBuffer.Length;

            // The first color change index is at 1/4 of the index buffer (1/4 of the height)
            _firstColorIndex = (int)(_indexBufferLength / 4);

            // The second color change index is at 3/4 of the height
            _secondColorIndex = _firstColorIndex * 3;

            _multiMaterialMesh.SubMeshes = new SubMesh[]
            {
                new SubMesh("SubMesh1")
                {
                    MaterialIndex = 0, StartIndexLocation = 0, IndexCount = _firstColorIndex
                },
                new SubMesh("SubMesh2")
                {
                    MaterialIndex = 1, StartIndexLocation = _firstColorIndex, IndexCount = _secondColorIndex - _firstColorIndex
                },
                new SubMesh("SubMesh3")
                {
                    MaterialIndex = 2, StartIndexLocation = _secondColorIndex, IndexCount = _indexBufferLength - _secondColorIndex
                },
            };

            _disposables.Add(_multiMaterialMesh);


            var materials = new Ab3d.DirectX.Material[]
            {
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.DimGray.ToColor3()
                },
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.Silver.ToColor3()
                },
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.Gold.ToColor3()
                },
            };

            _meshObjectNode = new Ab3d.DirectX.MeshObjectNode(_multiMaterialMesh, materials);

            _disposables.Add(_meshObjectNode);

            // Use SceneNodeVisual3D to show SceneNode in DXViewportView
            var sceneNodeVisual3D = new SceneNodeVisual3D(_meshObjectNode);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }
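        // CreateMultiMeshBuffer is not shown in this example. The following is only a minimal sketch of how
        // many copies of one MeshGeometry3D could be combined into a single vertex and index buffer, with each
        // copy translated to its position in the grid. It assumes that PositionNormalTexture exposes Position,
        // Normal and TextureCoordinate members, that Normals and TextureCoordinates are populated, and that the
        // copies are filled row by row from bottom to top (the order assumed by the color index calculations above).
        private void CreateMultiMeshBuffer(Vector3 center, Vector3 size,
                                           int xCount, int yCount, int zCount,
                                           MeshGeometry3D meshGeometry3D,
                                           out PositionNormalTexture[] vertexBuffer,
                                           out int[] indexBuffer)
        {
            int meshCount         = xCount * yCount * zCount;
            int onePositionsCount = meshGeometry3D.Positions.Count;
            int oneIndicesCount   = meshGeometry3D.TriangleIndices.Count;

            vertexBuffer = new PositionNormalTexture[meshCount * onePositionsCount];
            indexBuffer  = new int[meshCount * oneIndicesCount];

            int meshIndex = 0;

            for (int y = 0; y < yCount; y++)
            {
                for (int z = 0; z < zCount; z++)
                {
                    for (int x = 0; x < xCount; x++)
                    {
                        // Offset of this copy of the mesh inside the grid (the grid is centered around center)
                        var offset = new Vector3(center.X + (x + 0.5f - xCount * 0.5f) * (size.X / xCount),
                                                 center.Y + (y + 0.5f - yCount * 0.5f) * (size.Y / yCount),
                                                 center.Z + (z + 0.5f - zCount * 0.5f) * (size.Z / zCount));

                        int vertexStart = meshIndex * onePositionsCount;

                        for (int i = 0; i < onePositionsCount; i++)
                        {
                            var p = meshGeometry3D.Positions[i];
                            var n = meshGeometry3D.Normals[i];
                            var t = meshGeometry3D.TextureCoordinates[i];

                            vertexBuffer[vertexStart + i].Position          = new Vector3((float)p.X, (float)p.Y, (float)p.Z) + offset;
                            vertexBuffer[vertexStart + i].Normal            = new Vector3((float)n.X, (float)n.Y, (float)n.Z);
                            vertexBuffer[vertexStart + i].TextureCoordinate = new Vector2((float)t.X, (float)t.Y);
                        }

                        int indexStart = meshIndex * oneIndicesCount;

                        // Indices of each copy point into its own part of the combined vertex buffer
                        for (int i = 0; i < oneIndicesCount; i++)
                            indexBuffer[indexStart + i] = meshGeometry3D.TriangleIndices[i] + vertexStart;

                        meshIndex++;
                    }
                }
            }
        }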
        private void CreateTestScene(int totalModelsCount)
        {
            Mouse.OverrideCursor = Cursors.Wait;

            // Creating a Model3DGroup takes too much time when a lot of objects are created.
            // Therefore we create SceneNodes (MeshObjectNode) directly.

            //var model3DGroup = CreateModel3DGroup(boxMesh, new Point3D(0, 0, 0), new Size3D(400, 100, 400), 5, 40, 40, 40);
            //var model3DGroup = CreateModel3DGroup(boxMesh, new Point3D(0, 0, 0), new Size3D(400, 200, 400), 5, 40, 20, 40);
            //var model3DGroup = CreateModel3DGroup(boxMesh, new Point3D(0, 0, 0), new Size3D(400, 200, 400), 10, 10, 5, 10);
            //modelsCount = model3DGroup.Children.Count;

            //MainViewport.Children.Add(model3DGroup.CreateModelVisual3D());

            try
            {
                if (_disposables != null)
                {
                    _disposables.Dispose();
                }

                if (_sceneNodeVisual3D != null)
                {
                    _mainViewport3D.Children.Remove(_sceneNodeVisual3D);

                    _sceneNodeVisual3D.SceneNode.Dispose();
                    _sceneNodeVisual3D = null;
                }


                _disposables = new DisposeList();

                var boxMesh = new BoxMesh3D(new Point3D(0, 0, 0), new Size3D(1, 1, 1), 1, 1, 1).Geometry;

                int modelsXZCount = totalModelsCount < 2500 ? 10 : 50;
                int modelsYCount  = totalModelsCount / (modelsXZCount * modelsXZCount);

                bool useSingleColor = _currentObjectsType == ObjectsTypes.SingleColorBoxes || _currentObjectsType == ObjectsTypes.SingleColorLines;

                SceneNode sceneNode;

                if (_currentObjectsType == ObjectsTypes.MultiColorBoxes || _currentObjectsType == ObjectsTypes.SingleColorBoxes)
                {
                    sceneNode = CreateBoxSceneNodes(boxMesh, new Point3D(0, 0, 0), new Size3D(500, modelsYCount * 10, 500), 5, useSingleColor, modelsXZCount, modelsYCount, modelsXZCount);
                }
                else if (_currentObjectsType == ObjectsTypes.MultiColorLines || _currentObjectsType == ObjectsTypes.SingleColorLines)
                {
                    sceneNode = CreateLineSceneNodes(new Point3D(0, 0, 0), new Size3D(500, modelsYCount * 10, 500), useSingleColor, modelsXZCount, modelsYCount, modelsXZCount, _disposables);
                }
                else if (_currentObjectsType == ObjectsTypes.MultiColorBoxesAndLines)
                {
                    sceneNode = new SceneNode();
                    var sceneNode1 = CreateBoxSceneNodes(boxMesh, new Point3D(0, -modelsYCount * 2.5 - 10, 0), new Size3D(500, modelsYCount * 5, 500), 5, useSingleColor, modelsXZCount, modelsYCount / 2, modelsXZCount);
                    var sceneNode2 = CreateLineSceneNodes(new Point3D(0, modelsYCount * 2.5 + 10, 0), new Size3D(500, modelsYCount * 5, 500), useSingleColor, modelsXZCount, modelsYCount / 2, modelsXZCount, _disposables);

                    sceneNode.AddChild(sceneNode1);
                    sceneNode.AddChild(sceneNode2);
                }
                else
                {
                    sceneNode = null;
                }

                if (sceneNode == null)
                {
                    return;
                }

                _objectsCount = totalModelsCount;

                _sceneNodeVisual3D = new SceneNodeVisual3D(sceneNode);
                _mainViewport3D.Children.Add(_sceneNodeVisual3D);

                _mainDXViewportView.Refresh();
            }
            finally
            {
                Mouse.OverrideCursor = null;
            }
        }
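        // CreateBoxSceneNodes is not shown in this example. The following is only a minimal sketch of how such a
        // method could create many MeshObjectNode objects under one parent SceneNode. The meaning of the '5'
        // argument is assumed to be the size of each box, and the use of the Transformation class (wrapping a
        // SharpDX matrix) for positioning is also an assumption - check the actual library API before using this.
        private SceneNode CreateBoxSceneNodes(MeshGeometry3D boxMesh, Point3D center, Size3D size, double boxSize,
                                              bool useSingleColor, int xCount, int yCount, int zCount)
        {
            var parentSceneNode = new SceneNode();

            // One DXMeshGeometry3D can be shared by all MeshObjectNode objects
            var dxMeshGeometry3D = new Ab3d.DirectX.Models.DXMeshGeometry3D(boxMesh);
            _disposables.Add(dxMeshGeometry3D);

            var singleColorMaterial = new Ab3d.DirectX.Materials.StandardMaterial() { DiffuseColor = Colors.Silver.ToColor3() };
            _disposables.Add(singleColorMaterial);

            for (int y = 0; y < yCount; y++)
            {
                for (int z = 0; z < zCount; z++)
                {
                    for (int x = 0; x < xCount; x++)
                    {
                        Ab3d.DirectX.Material material;

                        if (useSingleColor)
                        {
                            material = singleColorMaterial; // sharing one material reduces the number of state changes
                        }
                        else
                        {
                            material = new Ab3d.DirectX.Materials.StandardMaterial()
                            {
                                DiffuseColor = new Color3((float)x / xCount, (float)y / yCount, (float)z / zCount)
                            };
                            _disposables.Add(material);
                        }

                        var meshObjectNode = new Ab3d.DirectX.MeshObjectNode(dxMeshGeometry3D, material);

                        // Scale the unit box to boxSize and move it to its position in the grid
                        var position = new Vector3((float)(center.X + ((x + 0.5) / xCount - 0.5) * size.X),
                                                   (float)(center.Y + ((y + 0.5) / yCount - 0.5) * size.Y),
                                                   (float)(center.Z + ((z + 0.5) / zCount - 0.5) * size.Z));

                        meshObjectNode.Transform = new Transformation(Matrix.Scaling((float)boxSize) * Matrix.Translation(position));

                        _disposables.Add(meshObjectNode);
                        parentSceneNode.AddChild(meshObjectNode);
                    }
                }
            }

            return parentSceneNode;
        }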