// Creates a ScreenSpaceLineNode from the given positions where every position can have its own color.
        public static ScreenSpaceLineNode CreateColoredLineNode(Vector3[] positions,
                                                                Color4[] lineColors,
                                                                float lineThickness,
                                                                DisposeList disposables,
                                                                bool isPolyLine = true) // when false we create a multi-line line (each individual line is defined by 2 position)
        {
            // LineColor is used as a mask when PositionColors are set (each color is multiplied by LineColor),
            // so White is used here to show the PositionColors unchanged.
            var coloredLineMaterial = new PositionColoredLineMaterial();
            coloredLineMaterial.LineColor      = Color4.White;
            coloredLineMaterial.LineThickness  = lineThickness;
            coloredLineMaterial.PositionColors = lineColors;
            coloredLineMaterial.IsPolyLine     = isPolyLine;

            // A poly-line is rendered as a line strip; a multi-line (position pairs) requires isLineStrip to be false.
            var lineNode = new ScreenSpaceLineNode(positions, isLineClosed: false, isLineStrip: isPolyLine, lineMaterial: coloredLineMaterial);

            // Register the created DXEngine resources so the caller can dispose them later
            if (disposables != null)
            {
                disposables.Add(lineNode);
                disposables.Add(coloredLineMaterial);
            }

            return lineNode;
        }
Esempio n. 2
0
        /// <summary>
        /// Creates two MeshObjectNodes that render the height map mesh (one with the specified
        /// front-face material, one with a gray back-face material) and adds them to RootContentVisual3D.
        /// </summary>
        /// <param name="heightMapMesh">mesh with the height map geometry (reused for both front and back faces)</param>
        /// <param name="dxMaterial">DXEngine material used to render the front faces</param>
        private void GenerateHeightMapSceneNodes(MeshBase heightMapMesh, Ab3d.DirectX.Material dxMaterial)
        {
            var meshObjectNode = new Ab3d.DirectX.MeshObjectNode(heightMapMesh, dxMaterial);

            meshObjectNode.Name = "HeightMeshObjectNode";

            _disposables.Add(meshObjectNode);

            var sceneNodeVisual3D = new SceneNodeVisual3D(meshObjectNode);

            RootContentVisual3D.Children.Add(sceneNodeVisual3D);


            // If you also want to render back faces of the height map you need to create another MeshObjectNode and set its IsBackFaceMaterial to true.
            // You can reuse the mesh. But this still requires almost twice the GPU power.
            var backDiffuseMaterial = new DiffuseMaterial(Brushes.Gray);
            var backDXMaterial      = new Ab3d.DirectX.Materials.WpfMaterial(backDiffuseMaterial);

            // FIX: the created DXEngine material must also be registered for disposal
            // (it was previously leaked - only the MeshObjectNodes were added to _disposables)
            _disposables.Add(backDXMaterial);

            meshObjectNode = new Ab3d.DirectX.MeshObjectNode(heightMapMesh, backDXMaterial);
            meshObjectNode.IsBackFaceMaterial = true;
            meshObjectNode.Name = "HeightBackMeshObjectNode";

            _disposables.Add(meshObjectNode);

            sceneNodeVisual3D = new SceneNodeVisual3D(meshObjectNode);
            RootContentVisual3D.Children.Add(sceneNodeVisual3D);
        }
        /// <summary>
        /// Initializes point-cloud rendering: creates a PixelMaterial with per-position colors,
        /// an OptimizedPointMesh for the positions and a CustomRenderableNode that renders the mesh,
        /// then adds the node to MainViewport and points Camera1 at the positions.
        /// </summary>
        /// <param name="positions">positions of the rendered points</param>
        /// <param name="positionsBounds">bounding box of the positions (used for mesh optimization and camera setup)</param>
        /// <param name="positionColors">per-position colors (one Color4 for each entry in positions)</param>
        private void InitializePointCloud(Vector3[] positions, BoundingBox positionsBounds, Color4[] positionColors)
        {
            if (MainDXViewportView.DXScene == null)
            {
                return; // If this happens, then this method is called too soon (before DXEngine is initialized) or we are using WPF 3D
            }
            // First, set up the material:

            // Create a new PixelMaterial
            _pixelMaterial = new PixelMaterial()
            {
                PixelColor  = Color4.White, // When using PixelColors, PixelColor is used as a mask (multiplied with each color)
                PixelSize   = 2,
                PixelColors = positionColors,
            };

            // Manually created materials must be initialized before first use
            _pixelMaterial.InitializeResources(MainDXViewportView.DXScene.DXDevice);

            _disposables.Add(_pixelMaterial);


            // Now set up the mesh and create SceneNode to show it
            _optimizedPointMesh = new OptimizedPointMesh <Vector3>(positions,
                                                                   positionsBounds,
                                                                   segmentsCount: 100);

            // NOTE that you can also use OptimizedPointMesh that takes more complex vertex struct for example PositionColor or PositionNormal. In this case use the other constructor.

            _optimizedPointMesh.OptimizationIndicesNumberThreshold = 100000; // We are satisfied with reducing the number of shown positions to 100000 (no need to optimize further - higher number reduced the initialization time)
            _optimizedPointMesh.MaxOptimizationViewsCount          = 10;     // Maximum number of created data sub-sets. The actual number can be lower when we hit the OptimizationIndicesNumberThreshold or when all vertices needs to be shown.

            // Optimize must be called after the thresholds above are set and before InitializeResources
            _optimizedPointMesh.Optimize(new SharpDX.Size2(MainDXViewportView.DXScene.Width, MainDXViewportView.DXScene.Height), standardPointSize: 1);

            _optimizedPointMesh.InitializeResources(MainDXViewportView.DXScene.DXDevice);

            _disposables.Add(_optimizedPointMesh);


            // To render OptimizedPointMesh we need to use CustomRenderableNode that provides custom rendering callback action.
            var customRenderableNode = new CustomRenderableNode(RenderAction, _optimizedPointMesh.Bounds, _optimizedPointMesh, _pixelMaterial);

            customRenderableNode.Name = "CustomRenderableNode";
            //customRenderableNode.CustomRenderingQueue = MainDXViewportView.DXScene.BackgroundRenderingQueue;

            _disposables.Add(customRenderableNode);

            // A SceneNodeVisual3D is needed to show a DXEngine SceneNode in the WPF viewport
            var sceneNodeVisual3D = new SceneNodeVisual3D(customRenderableNode);

            //sceneNodeVisual3D.Transform = transform;

            MainViewport.Children.Add(sceneNodeVisual3D);


            // Position the camera so that all positions are visible
            Camera1.TargetPosition = positionsBounds.Center.ToWpfPoint3D();
            Camera1.Distance       = positionsBounds.ToRect3D().GetDiagonalLength() * 0.5;
        }
        /// <summary>
        /// Creates a line with per-position colors made of the specified number of segments on the
        /// specified layer (used as the y coordinate), shows it in MainViewport and registers it
        /// for line selection (LineSelectorData) and in the lrData list.
        /// </summary>
        /// <param name="count">number of positions generated along the x axis</param>
        /// <param name="layer">y coordinate of all generated positions</param>
        void CreateColoredPolyLine(int count, int layer)
        {
            Vector3 preV    = new Vector3(0, layer, 0);
            Color4  preC    = Color4.White;
            var     points  = new List <Point3D>(); // WPF positions used by LineSelectorData
            var     vectors = new List <Vector3>(); // SharpDX positions used by ScreenSpaceLineNode
            var     colors  = new List <SharpDX.Color4>();

            for (int i = 0; i < count; i++)
            {
                // Each position is moved by 1 on the x axis from the previous position
                Vector3 v = new Vector3(1, layer, 0);
                v.X += preV.X;

                colors.Add(preC);
                points.Add(new Point3D((double)v.X, (double)v.Y, (double)v.Z));
                vectors.Add(v);
                preV = v;
            }

            // NOTE(review): this DisposeList is local and is never disposed or stored anywhere,
            // so the resources added to it are leaked - TODO: store it in a field and dispose it
            // when the line is removed (confirm ownership with the rest of the class).
            DisposeList disposables   = new DisposeList();
            float       lineThickness = 5;
            bool        isPolyLine    = false;
            var         lineMaterial  = new PositionColoredLineMaterial()
            {
                LineColor      = Color4.White, // When PositionColors are used, then LineColor is used as a mask - each color is multiplied by LineColor - use White to preserve PositionColors
                LineThickness  = lineThickness,
                PositionColors = colors.ToArray(),
                IsPolyLine     = isPolyLine
            };

            var screenSpaceLineNode = new ScreenSpaceLineNode(vectors.ToArray(), isLineClosed: false, isLineStrip: true, lineMaterial: lineMaterial);

            // disposables is created above and can never be null here, so the dead null check was removed
            disposables.Add(screenSpaceLineNode);
            disposables.Add(lineMaterial);

            var sceneNodeVisual = new SceneNodeVisual3D(screenSpaceLineNode);

            MainViewport.Children.Add(sceneNodeVisual);

            // Register the line for hit-testing / selection with LineSelectorData
            bool isVisualConnected;
            var  lineSelectorData = new LineSelectorData(points, true);

            lineSelectorData.PositionsTransform3D = Ab3d.Utilities.TransformationsHelper.GetVisual3DTotalTransform(sceneNodeVisual, true, out isVisualConnected);
            _lineSelectorData.Add(lineSelectorData);

            var _data = new WLineRenderData(screenSpaceLineNode, lineSelectorData, lineMaterial, colors);

            lrData.Add(_data);
        }
        /// <summary>
        /// Returns a PhysicallyBasedMaterial for the specified Assimp material.
        /// An already created material is returned from the _dxMaterials cache;
        /// otherwise a new one is created, configured from the Assimp material and cached.
        /// </summary>
        /// <param name="assimpMaterial">source Assimp material</param>
        /// <param name="filePath">path of the loaded model file (used to resolve texture files)</param>
        /// <param name="customTexturesFolder">optional folder with custom textures</param>
        /// <param name="useStrictFileNameMatch">when true only exact texture file names are matched</param>
        /// <param name="supportDDSTextures">when true dds texture files are also considered</param>
        private PhysicallyBasedMaterial CreatePbrMaterial(AssimpMaterial assimpMaterial, string filePath, string customTexturesFolder, bool useStrictFileNameMatch, bool supportDDSTextures)
        {
            PhysicallyBasedMaterial physicallyBasedMaterial;

            // Reuse the PhysicallyBasedMaterial when it was already created for this Assimp material
            if (_dxMaterials.TryGetValue(assimpMaterial, out physicallyBasedMaterial))
            {
                Log($"  Material: {assimpMaterial.Name ?? ""} (already defined)");
                return physicallyBasedMaterial;
            }

            // Note: materials without a diffuse texture are NOT rejected here -
            // they are still created below and only get the BaseColor set.

            Log($"  Material {assimpMaterial.Name ?? ""}:");

            physicallyBasedMaterial = new PhysicallyBasedMaterial();

            // Only when the material has a diffuse texture do we also try to find the other PBR textures
            if (assimpMaterial.HasTextureDiffuse)
                AddPBRTextures(assimpMaterial, filePath, customTexturesFolder, useStrictFileNameMatch, supportDDSTextures, physicallyBasedMaterial);

            // Set BaseColor based on the DiffuseColor
            if (assimpMaterial.HasColorDiffuse)
                physicallyBasedMaterial.BaseColor = new Color4(assimpMaterial.ColorDiffuse.R, assimpMaterial.ColorDiffuse.G, assimpMaterial.ColorDiffuse.B, assimpMaterial.ColorDiffuse.A);

            // Without a Metalness texture use zero Metalness - a plastic-like material
            if (!physicallyBasedMaterial.HasTextureMap(TextureMapTypes.Metalness))
                physicallyBasedMaterial.Metalness = 0;

            _disposables.Add(physicallyBasedMaterial);
            _dxMaterials.Add(assimpMaterial, physicallyBasedMaterial);

            AddTextureMapSelections(assimpMaterial.Name, physicallyBasedMaterial);

            return physicallyBasedMaterial;
        }
        /// <summary>
        /// Creates a WPF DiffuseMaterial that shows the specified texture file.
        /// For dds files the texture is loaded with the DXEngine TextureLoader (returns null when
        /// DXScene is not available, e.g. with WPF 3D rendering); other image formats are loaded
        /// into a WPF ImageBrush with DXEngine attributes set.
        /// </summary>
        /// <param name="textureFileName">absolute path of the texture file</param>
        /// <returns>the created DiffuseMaterial, or null for dds files when DXScene is not available</returns>
        private DiffuseMaterial CreateDiffuseMaterial(string textureFileName)
        {
            DiffuseMaterial material;

            if (textureFileName.EndsWith(".dds", StringComparison.OrdinalIgnoreCase))
            {
                var dxScene = MainDXViewportView.DXScene;
                if (dxScene == null) // In case of WPF 3D rendering
                {
                    return(null);
                }


                // The easiest way to load image file and in the same time create a material with the loaded texture is to use the CreateStandardTextureMaterial method.
                // FIX: use the null-checked dxScene local instead of re-reading MainDXViewportView.DXScene
                var standardMaterial = Ab3d.DirectX.TextureLoader.CreateStandardTextureMaterial(dxScene.DXDevice, textureFileName);

                // We need to manually dispose the created StandardMaterial and ShaderResourceView
                _disposables.Add(standardMaterial);
                _disposables.Add(standardMaterial.DiffuseTextures[0]);


                bool isAlphaToCoverageEnabled = AlphaToCoverageRadioButton.IsChecked ?? false;
                bool isAlphaClippingEnabled   = AlphaClippingRadioButton.IsChecked ?? false;

                standardMaterial.AlphaClipThreshold = isAlphaClippingEnabled
                                                        ? GetAlphaClippingThreshold()
                                                        : 0; // 0 disables alpha clipping

                // When AlphaToCoverage is enabled, then we need to set the TextureBlendState to AlphaToCoverage.
                // If this is not done (if TextureBlendState is null), then the bend state is set to Opaque or PremultipliedAlphaBlend (when the texture has transparency)
                if (isAlphaToCoverageEnabled)
                {
                    standardMaterial.TextureBlendState = dxScene.DXDevice.CommonStates.AlphaToCoverage;
                }

                material = new DiffuseMaterial();
                material.SetUsedDXMaterial(standardMaterial);
            }
            else
            {
                var bitmapImage = new BitmapImage(new Uri(textureFileName, UriKind.Absolute));
                material = new DiffuseMaterial(new ImageBrush(bitmapImage));

                // When using WPF material, we need to set special DXEngine attributes with using SetDXAttribute (this is done in the following method):
                SetMaterialsDXAttributes(material);
            }

            return(material);
        }
Esempio n. 7
0
        /// <summary>
        /// Initializes the OptimizedTubePathSample: after the DXScene device is created the spiral
        /// tube paths are generated, and all created resources are disposed when the control is unloaded.
        /// </summary>
        public OptimizedTubePathSample()
        {
            InitializeComponent();


            _disposables = new DisposeList();

            // The DXDevice is needed to get the SolidColorEffect, so wait until the DXScene device is created
            MainDXViewportView.DXSceneDeviceCreated += delegate(object sender, EventArgs args)
            {
                _solidColorEffect = MainDXViewportView.DXScene.DXDevice.EffectsManager.GetEffect <SolidColorEffect>();
                _disposables.Add(_solidColorEffect);

                // Stopwatch is used to measure how long the scene creation and first render take
                _stopwatch = new Stopwatch();
                _stopwatch.Start();

                AddSpirals_MeshObjectNode(10, 10, 5000, useMultiThreading: true);

                // Uncomment to see how the tubes are created in a standard Ab3d.PowerToys way.
                //AddSpirals_TubePathVisual3D(10, 10, 5000);

                // To see how to use instancing to draw tube paths, uncomment the following line.
                // Note: In this case is instancing slower then rendering a fixed geometry because the task for the GPU is much more complicated in case of instancing.
                //AddInstancedSpirals(10, 10, 5000);
            };

            // Subscribe to get event when the first frame is rendered
            MainDXViewportView.SceneRendered += MainDXViewportViewOnSceneRendered;

            // Dispose all created DXEngine resources when the control is unloaded
            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                _disposables.Dispose();
            };
        }
        /// <summary>
        /// Called when the DXScene device is created: registers a SolidColorEffect that renders each
        /// object with a color derived from the object's id and adds a custom (initially disabled)
        /// rendering step that renders the object ids after the default object rendering step.
        /// </summary>
        private void MainDxViewportViewOnDxSceneDeviceCreated(object sender, EventArgs e)
        {
            // Create a SolidColorEffect that will be used to render each objects with a color from object's id
            _solidColorEffect = new SolidColorEffect();
            _solidColorEffect.OverrideModelColor = true; // We will overwrite the object's color with color specified in SolidColorEffect.Color

            _disposables.Add(_solidColorEffect);

            MainDXViewportView.DXScene.DXDevice.EffectsManager.RegisterEffect(_solidColorEffect);


            // Create a custom rendering step that will be used instead of standard rendering step
            _objectIdRenderingStep = new CustomActionRenderingStep("ObjectIdRenderingStep")
            {
                CustomAction = ObjectIdRenderingAction,
                IsEnabled    = false
            };

            MainDXViewportView.DXScene.RenderingSteps.AddAfter(MainDXViewportView.DXScene.DefaultRenderObjectsRenderingStep, _objectIdRenderingStep);

            // In this sample we render object ids to a custom bitmap,
            // so for standard rendering our custom rendering step stays disabled.
            // But if you want, you can enable it and disable the standard rendering - this will always render object ids:
            //_objectIdRenderingStep.IsEnabled = true;
            //MainDXViewportView.DXScene.DefaultRenderObjectsRenderingStep.IsEnabled = false;
        }
Esempio n. 9
0
        /// <summary>
        /// Creates a ScreenSpaceLineNode for the specified positions, offsets it on the x axis,
        /// shows it in MainViewport and registers the created resources for disposal.
        /// </summary>
        private ScreenSpaceLineNode CreateLinesWithPositions(Vector3[] linePositions, bool isLineStrip, bool isPolyLine, bool isLineClosed, Color lineColor, float xOffset)
        {
            // The material is created first because the ScreenSpaceLineNode constructor requires it
            var lineMaterial = CreateLineMaterial(isPolyLine, lineColor);
            var lineNode     = new ScreenSpaceLineNode(linePositions, isLineStrip, isLineClosed, lineMaterial);

            // Offset the line on the x axis so multiple sample lines can be shown side by side
            lineNode.Transform = new Transformation(SharpDX.Matrix.Translation(xOffset, 0, 0));

            // To show ScreenSpaceLineNode in DXViewportView we need to put it inside a SceneNodeVisual3D
            var sceneNodeVisual3D = new SceneNodeVisual3D(lineNode);
            MainViewport.Children.Add(sceneNodeVisual3D);

            // Register the manually created DXEngine resources for disposal
            _disposables.Add(lineNode);
            _disposables.Add(lineMaterial);

            return lineNode;
        }
Esempio n. 10
0
        /// <summary>
        /// Sets up object-outline rendering: a SolidColorEffect renders the whole scene in black with
        /// expanded geometry (OutlineThickness), and a RenderObjectsRenderingStep that uses this effect
        /// is added before the default object rendering step so the standard scene is drawn on top.
        /// </summary>
        private void SetupObjectOutlinesRenderingStep()
        {
            // One way to show object outlines is to render the whole scene with black color (using SolidColorEffect)
            // and with expanding the geometry of each object in the direction of triangle normals (setting SolidColorEffect.OutlineThickness property)
            // Then the standard 3D scene is rendered on top of the black 3D scene.

            _blackOutlineEffect = new SolidColorEffect()
            {
                Color = Color4.Black,
                OverrideModelColor = true,  // This will render all objects with Black color; when false then object's color is used
                OutlineThickness   = 3,

                // Use the following 3 settings to show outline for the whole 3D scene:
                WriteMaxDepthValue      = true,                                                      // the black objects will be written to the back of all the objects (using max depth value).
                OverrideRasterizerState = _mainDXViewportView.DXScene.DXDevice.CommonStates.CullNone // render front and back triangles
            };

            _disposables.Add(_blackOutlineEffect);

            _blackOutlineEffect.InitializeResources(_mainDXViewportView.DXScene.DXDevice);

            // Add another RenderObjectsRenderingStep that will render black scene ...
            _renderObjectOutlinesRenderingStep = new RenderObjectsRenderingStep("RenderObjectOutlinesRenderingStep")
            {
                OverrideEffect = _blackOutlineEffect,
                FilterRenderingQueuesFunction = delegate(RenderingQueue queue)
                {
                    return(queue != _mainDXViewportView.DXScene.LineGeometryRenderingQueue);  // Render all objects except 3D lines
                }
            };

            // FIX: register the created rendering step for disposal
            // (previously _blackOutlineEffect was added a second time here and the rendering step was never disposed)
            _disposables.Add(_renderObjectOutlinesRenderingStep);

            // ... and add it before standard RenderObjectsRenderingStep
            _mainDXViewportView.DXScene.RenderingSteps.AddBefore(_mainDXViewportView.DXScene.DefaultRenderObjectsRenderingStep, _renderObjectOutlinesRenderingStep);


            // Uncomment the following line to prevent rendering standard 3D objects:
            //MainDXViewportView.DXScene.DefaultRenderObjectsRenderingStep.IsEnabled = false;
        }
Esempio n. 11
0
        /// <summary>
        /// Adds a series of horizontal 3D lines (40 units long, 10 units apart along the z axis)
        /// starting at startPosition, rendered with the specified color and z-buffer settings.
        /// </summary>
        private void AddLines(Point3D startPosition, int positionsCount, Color lineColor, bool readZBuffer = true, bool writeZBuffer = true, RenderingQueue customRenderingQueue = null)
        {
            // Each line is defined by a pair of positions, so we need two positions per line
            var positions = new Vector3[positionsCount * 2];
            var position  = startPosition.ToVector3();

            for (int i = 0; i < positionsCount; i++)
            {
                positions[i * 2]     = position;
                positions[i * 2 + 1] = position + new Vector3(40, 0, 0);

                position += new Vector3(0, 0, 10); // next line is moved by 10 units on the z axis
            }

            // ThickLineEffect that renders the 3D lines can use the ReadZBuffer and WriteZBuffer values from LineMaterial.
            //
            // When ReadZBuffer is false (true by default), then the line is rendered without checking the depth buffer -
            // so it is always rendered, even when it is behind some other 3D object and should not be visible from the camera.
            //
            // When WriteZBuffer is false (true by default), then the depth of the line is not written to the depth buffer
            // when the line is rendered. So no other object will be hidden by the line even if that object is behind the line.
            var lineMaterial = new LineMaterial()
            {
                LineColor     = lineColor.ToColor4(),
                LineThickness = 2,
                ReadZBuffer   = readZBuffer,
                WriteZBuffer  = writeZBuffer
            };

            _disposables.Add(lineMaterial);


            // isLineStrip is false because the positions define independent line segments (position pairs)
            var screenSpaceLineNode = new ScreenSpaceLineNode(positions, isLineStrip: false, isLineClosed: false, lineMaterial: lineMaterial);

            // It is also needed that the 3D line is put to the Background or Overlay rendering queue so that it is rendered before or after other 3D objects.
            screenSpaceLineNode.CustomRenderingQueue = customRenderingQueue;

            var sceneNodeVisual3D = new SceneNodeVisual3D(screenSpaceLineNode);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }
Esempio n. 12
0
        /// <summary>
        /// Shows the specified positions as a point cloud: creates an OptimizedPointMesh and a
        /// PixelMaterial, wraps them in a CustomRenderableNode and adds it to MainViewport.
        /// </summary>
        /// <param name="positionsArray">positions of the rendered points</param>
        /// <param name="pixelSize">size of each rendered pixel</param>
        /// <param name="pixelColor">color of all rendered pixels</param>
        /// <param name="positionBounds">optional precomputed bounds of the positions; when null or empty the bounds are calculated here</param>
        private void ShowPositionsArray(Vector3[] positionsArray, float pixelSize, Color4 pixelColor, Bounds positionBounds)
        {
            BoundingBox positionsBoundingBox;

            // To correctly set the Camera's Near and Far distance, we need to provide the correct bounds of each shown 3D model.
            if (positionBounds != null && !positionBounds.IsEmpty)
            {
                // It is highly recommended to manually set the Bounds.
                positionsBoundingBox = positionBounds.BoundingBox;
            }
            else
            {
                // if we do not manually set the Bounds, then we need to call CalculateBounds to calculate the bounds
                positionsBoundingBox = BoundingBox.FromPoints(positionsArray);
            }


            // Create OptimizedPointMesh that will optimize rendering of positions.
            // It uses two techniques to do that:

            _optimizedPointMesh = new OptimizedPointMesh <Vector3>(positionsArray,
                                                                   positionsBoundingBox,
                                                                   segmentsCount: 100); // All the positions are divided into 100 segments - when rendering each segment is checked if it is visible in the current camera (if not, then it is not rendered)

            // NOTE that you can also use OptimizedPointMesh that takes more complex vertex struct for example PositionColor or PositionNormal. In this case use the other constructor.

            _optimizedPointMesh.OptimizationIndicesNumberTreshold = 100000; // We are satisfied with reducing the number of shown positions to 100000 (no need to optimize further - higher number reduced the initialization time)
            _optimizedPointMesh.MaxOptimizationViewsCount         = 10;     // Maximum number of created data sub-sets. The actual number can be lower when we hit the OptimizationIndicesNumberTreshold or when all vertices needs to be shown.

            // Optimize must be called after the thresholds above are set and before InitializeResources
            _optimizedPointMesh.Optimize(new SharpDX.Size2(MainDXViewportView.DXScene.Width, MainDXViewportView.DXScene.Height), pixelSize);

            _optimizedPointMesh.InitializeResources(MainDXViewportView.DXScene.DXDevice);



            // We will need to dispose the SimpleMesh
            _modelDisposables.Add(_optimizedPointMesh);


            // Create a new PixelMaterial
            _pixelMaterial = new PixelMaterial()
            {
                PixelColor = pixelColor,
                PixelSize  = pixelSize,

                // By default graphics card renders objects that are closer to the camera over the objects that are farther away from the camera.
                // This means that positions that are closer to the camera will be rendered over the positions that are farther away.
                // This may distort the shown colors.
                // Therefore when using pixel colors it is better to disable depth buffer checking and render all the pixels.
                // This is done with setting ReadZBuffer and WriteZBuffer to false.
                ReadZBuffer  = false,
                WriteZBuffer = false
            };


            // It is also possible to set per-pixel colors (or per-pixel sizes with setting PixelSizes - not demonstrated here).
            // This comes with a performance drawback (see comment below).
            //
            // To test per-pixel colors, uncomment the following code:

            //var pixelColors = new Color4[positionsArray.Length];
            //for (int i = 0; i < positionsArray.Length; i++)
            //    pixelColors[i] = new Color4((i % 2 == 0) ? 1 : 0, 0, (i % 2 != 0) ? 1 : 0, 1);

            //_pixelMaterial.PixelColors = pixelColors;
            //_pixelMaterial.PixelColor = Color4.White; // When PixelColors array is used, then PixelColor is used as mask (each color in PixelColors is multiplied with PixelColor). To preserve the colors in PixelColors we need to set PixelColor to White.

            // By default the OptimizedPointCloud "combines" positions that are close together (closer than the size of one pixel on the screen)
            // and renders only some of them. In this case it is possible that only each second point (or each tenth point) is rendered
            // and this can remove the "color mixing" in our sample.
            // In such cases it is possible to disable this optimization with setting OptimizePositions to false:
            //_optimizedPointMesh.OptimizePositions = false;
            //
            // After this the OptimizedPointCloud will only provide optimization that works with grouping positions into 100 segments
            // and then checking which segments is visible in the camera (by checking segment bounding box).
            // But when the camera is positioned in such a way that all positions are visible,
            // then all positions will be sent to graphics card - in this case the OptimizePositions can provide good results with skipping some pixels.
            //
            // But if the actual colors from your data will not have such sharp color differences (will have more gradients),
            // then this problem should not be visible.


            _pixelMaterial.InitializeResources(MainDXViewportView.DXScene.DXDevice);

            _modelDisposables.Add(_pixelMaterial);


            // To render OptimizedPointMesh we need to use CustomRenderableNode that provides custom rendering callback action.
            _customRenderableNode      = new CustomRenderableNode(RenderAction, _optimizedPointMesh.Bounds, _optimizedPointMesh, _pixelMaterial);
            _customRenderableNode.Name = "CustomRenderableNode";

            _modelDisposables.Add(_customRenderableNode);

            // A SceneNodeVisual3D is needed to show a DXEngine SceneNode in the WPF viewport
            var sceneNodeVisual3D = new SceneNodeVisual3D(_customRenderableNode);

            //sceneNodeVisual3D.Transform = transform;

            MainViewport.Children.Add(sceneNodeVisual3D);
        }
        /// <summary>
        /// Sets up outline rendering with an expand post-process: the black outline objects are rendered
        /// first, then a horizontal and a vertical ExpandPostProcess widen them by the selected outline
        /// width, and finally the standard scene is rendered on top of the expanded black scene.
        /// </summary>
        private void SetupExpandPostProcessOutlines()
        {
            var dxScene = MainDXViewportView.DXScene;

            _blackOutlineEffect = EnsureSolidColorEffect();

            if (_blackOutlineEffect == null)
            {
                return;
            }

            // Reset values that may be changed when using SolidColorEffectWithOutlines
            _blackOutlineEffect.DepthBias = 0;
            _blackOutlineEffect.OverrideRasterizerState = null;
            _blackOutlineEffect.OutlineThickness        = 0;
            _blackOutlineEffect.WriteMaxDepthValue      = true;

            _renderObjectOutlinesRenderingStep = EnsureRenderObjectsRenderingStep(dxScene);

            // Add the outline objects rendering step before the default objects rendering step (only once)
            if (!dxScene.RenderingSteps.Contains(_renderObjectOutlinesRenderingStep))
            {
                dxScene.RenderingSteps.AddBefore(dxScene.DefaultRenderObjectsRenderingStep, _renderObjectOutlinesRenderingStep);
            }


            int outlineWidth = (int)OutlineWidthComboBox.SelectedItem;

            if (_prepareExpandObjectsPostProcessingRenderingStep == null)
            {
                // Expand post process is done in two passes (one horizontal and one vertical)
                _horizontalExpandPostProcess = new Ab3d.DirectX.PostProcessing.ExpandPostProcess(isVerticalRenderingPass: false, expansionWidth: outlineWidth, backgroundColor: dxScene.BackgroundColor);
                _verticalExpandPostProcess   = new Ab3d.DirectX.PostProcessing.ExpandPostProcess(isVerticalRenderingPass: true, expansionWidth: outlineWidth, backgroundColor: dxScene.BackgroundColor);

                _disposables.Add(_horizontalExpandPostProcess);
                _disposables.Add(_verticalExpandPostProcess);

                var expandPostProcesses = new PostProcessBase[]
                {
                    _horizontalExpandPostProcess,
                    _verticalExpandPostProcess
                };

                // To execute the post processes we need two rendering steps:
                // 1) PreparePostProcessingRenderingStep that creates required RenderTargets and ShaderResourceViews and sets that to the RenderPostProcessingRenderingStep
                // 2) RenderPostProcessingRenderingStep that actually executes all the post-processes

                // Because we will execute post-processes before the standard scene rendering,
                // we also need to make sure that the Destination buffer is correctly set (see _prepareExpandObjectsPostProcessingRenderingStep.BeforeRunningStep)
                // and that the DepthStencilView is reset after the post-processes are rendered (see _expandObjectsPostProcessesRenderingSteps.AfterRunningStep).

                // First create the RenderPostProcessingRenderingStep because it is needed in the constructor of the PreparePostProcessingRenderingStep
                _expandObjectsPostProcessesRenderingSteps = new RenderPostProcessingRenderingStep("Expand objects rendering step", expandPostProcesses);
                _expandObjectsPostProcessesRenderingSteps.AfterRunningStep += delegate(object sender, RenderingEventArgs args)
                {
                    // Post-processes are usually executed at the end of rendering process and work on 2D textures so they do not require DepthStencil.
                    // The CurrentBackBuffer / Description and RenderTargetView are already correct because they are sent by PreparePostProcessingRenderingStep,
                    // but we need to set the _savedDepthStencilView and SupersamplingCount.
                    args.RenderingContext.SetBackBuffer(args.RenderingContext.CurrentBackBuffer,
                                                        args.RenderingContext.CurrentBackBufferDescription,
                                                        args.RenderingContext.CurrentRenderTargetView,
                                                        _savedDepthStencilView,
                                                        dxScene.SupersamplingCount,
                                                        bindNewRenderTargetsToDeviceContext: true);
                };

                _disposables.Add(_expandObjectsPostProcessesRenderingSteps);


                _prepareExpandObjectsPostProcessingRenderingStep = new PreparePostProcessingRenderingStep(_expandObjectsPostProcessesRenderingSteps, "Prepare expand post process");
                _prepareExpandObjectsPostProcessingRenderingStep.BeforeRunningStep += delegate(object sender, RenderingEventArgs args)
                {
                    // Because after the post-processes are executed we will continue with rendering the scene,
                    // we need to set the DestinationBackBuffer (there will be the final result of the post-processes)
                    // to the currently used BackBuffer. If this is not done, then RenderingContext.FinalBackBuffer is used as destination back buffer.
                    _prepareExpandObjectsPostProcessingRenderingStep.SetCustomDestinationBackBuffer(args.RenderingContext.CurrentBackBuffer,
                                                                                                    args.RenderingContext.CurrentBackBufferDescription,
                                                                                                    args.RenderingContext.CurrentRenderTargetView);

                    // Save CurrentDepthStencilView so it can be restored in AfterRunningStep above
                    _savedDepthStencilView = args.RenderingContext.CurrentDepthStencilView;
                };

                _disposables.Add(_prepareExpandObjectsPostProcessingRenderingStep);
            }
            else
            {
                // The rendering steps already exist - only update the expansion width on both passes
                _horizontalExpandPostProcess.ExpansionWidth = outlineWidth;
                _verticalExpandPostProcess.ExpansionWidth   = outlineWidth;
            }


            // Add the prepare and render post-processing steps after the outline objects rendering step (only once)
            if (!dxScene.RenderingSteps.Contains(_prepareExpandObjectsPostProcessingRenderingStep))
            {
                dxScene.RenderingSteps.AddAfter(_renderObjectOutlinesRenderingStep, _prepareExpandObjectsPostProcessingRenderingStep);
                dxScene.RenderingSteps.AddAfter(_prepareExpandObjectsPostProcessingRenderingStep, _expandObjectsPostProcessesRenderingSteps);
            }
        }
        // Creates a PhysicallyBasedMaterial for the specified assimpMaterial by collecting its texture maps
        // (diffuse / albedo / base color texture and related maps found in the same folder).
        // Texture file names are resolved against customTexturesFolder (when set) or against filePath.
        // Returns null when the material has no diffuse texture or no usable texture files are found.
        // Created materials are cached in _dxMaterials so each AssimpMaterial is only processed once;
        // loaded ShaderResourceViews are cached in _texturesCache so each texture file is only loaded once.
        private PhysicallyBasedMaterial CreatePbrMaterial(AssimpMaterial assimpMaterial, string filePath, string customTexturesFolder, bool useStrictFileNameMatch, bool supportDDSTextures)
        {
            PhysicallyBasedMaterial physicallyBasedMaterial;

            // Return the cached material when this AssimpMaterial was already processed
            if (_dxMaterials.TryGetValue(assimpMaterial, out physicallyBasedMaterial))
            {
                Log($"  Material: {assimpMaterial.Name ?? ""} (already defined)");
                return(physicallyBasedMaterial);
            }

            if (!assimpMaterial.HasTextureDiffuse)
            {
                Log($"  Material {assimpMaterial.Name ?? ""} does not define a diffuse texture");
                return(null);
            }


            Log($"  Material {assimpMaterial.Name ?? ""}:");

            string diffuseTextureFileName = assimpMaterial.TextureDiffuse.FilePath;

            // Resolve the diffuse texture file name: customTexturesFolder (when set) wins;
            // otherwise a relative path is resolved against the model's filePath.
            if (!string.IsNullOrEmpty(customTexturesFolder))
            {
                diffuseTextureFileName = System.IO.Path.Combine(customTexturesFolder, System.IO.Path.GetFileName(diffuseTextureFileName));
            }
            else if (!System.IO.Path.IsPathRooted(diffuseTextureFileName))
            {
                diffuseTextureFileName = System.IO.Path.Combine(filePath, diffuseTextureFileName);
            }

            string folderName = System.IO.Path.GetDirectoryName(diffuseTextureFileName);

            if (!System.IO.Directory.Exists(folderName))
            {
                Log($"  Folder for diffuse texture does not exist: {folderName ?? ""}:");
                return(null);
            }


            // Cache the file list for each folder so repeated materials do not hit the file system again
            if (_folderImageFiles == null)
            {
                _folderImageFiles = new Dictionary <string, string[]>();
            }

            string[] allFilesInFolder;

            if (!_folderImageFiles.TryGetValue(folderName, out allFilesInFolder))
            {
                allFilesInFolder = System.IO.Directory.GetFiles(folderName, "*.*", SearchOption.TopDirectoryOnly);
                _folderImageFiles.Add(folderName, allFilesInFolder);
            }


            string fileNameWithoutKnownSuffix = KnownTextureFiles.GetFileNameWithoutKnownSuffix(diffuseTextureFileName);

            // Get material files that start with the diffuse texture file name without a suffix
            List <string> materialFiles;

            if (useStrictFileNameMatch)
            {
                materialFiles = allFilesInFolder.Where(f => fileNameWithoutKnownSuffix == KnownTextureFiles.GetFileNameWithoutKnownSuffix(f)).ToList();
            }
            else
            {
                materialFiles = allFilesInFolder.Where(f => f.IndexOf(fileNameWithoutKnownSuffix, 0, StringComparison.OrdinalIgnoreCase) != -1).ToList();
            }


            _textureFiles.Clear();

            if (materialFiles.Count == 0)
            {
                Log($"   Folder ({folderName}) for {assimpMaterial.Name ?? ""} material does not define any texture files");
                return(null);
            }

            // Collect one file per texture map type into _textureFiles (dds files are preferred)
            bool hasDiffuseTexture = false;
            foreach (var materialFile in materialFiles)
            {
                if (!TextureLoader.IsSupportedFile(materialFile, supportDDSTextures)) // Skip unsupported files
                {
                    continue;
                }

                var textureMapType = KnownTextureFiles.GetTextureType(materialFile);
                if (textureMapType == TextureMapTypes.Unknown)
                {
                    if (!hasDiffuseTexture)
                    {
                        textureMapType = TextureMapTypes.DiffuseColor; // First unknown file type is considered to be diffuse texture file
                    }
                    else
                    {
                        continue; // Unknown file type
                    }
                }

                bool isDiffuseTexture = (textureMapType == TextureMapTypes.DiffuseColor ||
                                         textureMapType == TextureMapTypes.Albedo ||
                                         textureMapType == TextureMapTypes.BaseColor);

                string existingTextureFileName;
                if (_textureFiles.TryGetValue(textureMapType, out existingTextureFileName))
                {
                    // Map for this texture type already exists
                    var existingFileExtension = System.IO.Path.GetExtension(existingTextureFileName);
                    if (existingFileExtension != null && existingFileExtension.Equals(".dds", StringComparison.OrdinalIgnoreCase))
                    {
                        continue; // DDS texture already found for this texture type - we will use existing dds texture
                    }
                }


                hasDiffuseTexture |= isDiffuseTexture;

                // FIX: Use the indexer instead of Add because a non-dds entry for this texture type
                // may already exist (see the TryGetValue check above) - Add would throw an
                // ArgumentException for a duplicate key; the indexer replaces the existing entry.
                _textureFiles[textureMapType] = materialFile;

                Log("    " + textureMapType + ": " + System.IO.Path.GetFileName(materialFile));
            }

            if (_textureFiles.Count > 0)
            {
                physicallyBasedMaterial = new PhysicallyBasedMaterial();

                foreach (var oneTextureFile in _textureFiles)
                {
                    var textureType = oneTextureFile.Key;
                    var oneFileName = oneTextureFile.Value;

                    ShaderResourceView shaderResourceView;

                    if (!_texturesCache.TryGetValue(oneFileName, out shaderResourceView))
                    {
                        // Color textures are converted to 32-bit pre-multiplied alpha RGBA format
                        var convertTo32bppPRGBA = (textureType == TextureMapTypes.BaseColor ||
                                                   textureType == TextureMapTypes.Albedo ||
                                                   textureType == TextureMapTypes.DiffuseColor);

                        shaderResourceView = Ab3d.DirectX.TextureLoader.LoadShaderResourceView(MainDXViewportView.DXScene.DXDevice.Device, oneFileName, loadDdsIfPresent: false, convertTo32bppPRGBA: convertTo32bppPRGBA);

                        _texturesCache.Add(oneFileName, shaderResourceView);
                    }

                    // FIX: Add the texture map also when the ShaderResourceView was found in _texturesCache -
                    // previously this Add was inside the cache-miss branch, so a material that reused an
                    // already loaded texture file was silently left without that texture map.
                    physicallyBasedMaterial.TextureMaps.Add(new TextureMapInfo((Ab3d.DirectX.Materials.TextureMapTypes)textureType, shaderResourceView, null, oneFileName));
                }

                _dxMaterials.Add(assimpMaterial, physicallyBasedMaterial);
                _disposables.Add(physicallyBasedMaterial);
            }

            // Still null when no supported texture file was collected
            return(physicallyBasedMaterial);
        }
Esempio n. 15
0
        // Creates a grid of xCount x yCount 3D spiral tube paths by using low level DXEngine objects
        // (MeshBase + MeshObjectNode) instead of higher-level WPF objects.
        // spiralLength is the total number of line segments in one spiral (20 segments form one circle).
        // When useMultiThreading is true, the meshes and their GPU resources are created in parallel
        // on background threads; only adding the created nodes to the scene is done on the UI thread.
        private void AddSpirals_MeshObjectNode(int xCount, int yCount, int spiralLength, bool useMultiThreading)
        {
            float circleRadius  = 10;
            int   spiralCircles = spiralLength / 20; // One circle in the spiral is created from 20 lines

            // Black DiffuseColor with White EmissiveColor; the actual rendering is done by _solidColorEffect
            var dxMaterial = new Ab3d.DirectX.Materials.StandardMaterial()
            {
                DiffuseColor  = Color3.Black,
                EmissiveColor = Color3.White,
                Effect        = _solidColorEffect
            };

            _disposables.Add(dxMaterial);


            // Center the grid of spirals around the origin
            float xStart = -xCount * circleRadius * 1.5f;
            float yStart = -yCount * circleRadius * 1.5f;


            if (useMultiThreading)
            {
                // On i7 6700 with 4 cores with hyper-threading the multi-threaded code path is almost 100% faster than the single threaded solution.
                var initializedMeshes = new MeshBase[xCount, yCount];

                var dxDevice = MainDXViewportView.DXScene.DXDevice;

                Parallel.For(0, xCount * yCount, xy =>
                {
                    // Convert the flat parallel loop index into (x, y) grid coordinates
                    int x = (int)xy / yCount;
                    int y = (int)xy % yCount;


                    var spiralPositions = CreateSpiralPositions(startPosition: new Vector3(x * circleRadius * 3 + xStart, y * circleRadius * 3 + yStart, 0),
                                                                circleXDirection: new Vector3(1, 0, 0),
                                                                circleYDirection: new Vector3(0, 1, 0),
                                                                oneSpiralCircleDirection: new Vector3(0, 0, -10),
                                                                circleRadius: circleRadius,
                                                                segmentsPerCircle: 20,
                                                                circles: spiralCircles);

                    // NOTE(review): radius is 1.0f here but 2 in the single-threaded path below - confirm whether this difference is intentional
                    MeshBase tubePathMesh = CreateTubePathMesh(spiralPositions, radius: 1.0f, segmentsCount: 8, isTubeClosed: true, tubeColor: Color4.White);

                    // Create DirectX resources in the background thread (this creates buffers on the GPU and send data there from the main memory)
                    tubePathMesh.InitializeResources(dxDevice);

                    // Save the mesh
                    initializedMeshes[x, y] = tubePathMesh;
                });

                // Now most of the work was done in multi-threaded way.
                // So we only need to create the MeshObjectNode and add that to the Scene. This needs to be done on the UI thread.
                MainViewport.BeginInit();
                MainViewport.Children.Clear();

                for (int x = 0; x < xCount; x++)
                {
                    for (int y = 0; y < yCount; y++)
                    {
                        var tubePathMesh   = initializedMeshes[x, y];
                        var meshObjectNode = new Ab3d.DirectX.MeshObjectNode(tubePathMesh, dxMaterial);

                        var tubePathVisual3D = new SceneNodeVisual3D(meshObjectNode);

                        // IMPORTANT:
                        //
                        // In this performance demo we create new spiral positions and new tubePathMesh for each spiral.
                        // But because the spirals are the same, we could create only one spiral positions and one tubePathMesh
                        // and then use that tubePathMesh to create multiple MeshObjectNode and SceneNodeVisual3D objects
                        // each of them with its Transform property set - as shown in the line below.
                        //
                        // Sharing one mesh would provide much better performance and lower memory usage,
                        // but for this demo we want to simulate creation of huge tube paths in the background thread.
                        //
                        //tubePathVisual3D.Transform = new TranslateTransform3D(x * circleRadius * 3 + xStart, y * circleRadius * 3 + yStart, 0);


                        _disposables.Add(tubePathMesh); // We did not add that in the background thread (we would need locking for that) so we need to do that now
                        _disposables.Add(meshObjectNode);

                        MainViewport.Children.Add(tubePathVisual3D);
                    }
                }

                MainViewport.EndInit();
            }

            else
            {
                // No multi-threading - create positions, meshes and scene nodes sequentially on the UI thread
                MainViewport.BeginInit();
                MainViewport.Children.Clear();

                for (int x = 0; x < xCount; x++)
                {
                    for (int y = 0; y < yCount; y++)
                    {
                        // Here the WPF (Point3D / Vector3D) overload of CreateSpiralPositions is used,
                        // so the positions are converted to Vector3 below
                        var spiralPositions2 = CreateSpiralPositions(startPosition: new Point3D(x * circleRadius * 3 + xStart, y * circleRadius * 3 + yStart, 0),
                                                                     circleXDirection: new Vector3D(1, 0, 0),
                                                                     circleYDirection: new Vector3D(0, 1, 0),
                                                                     oneSpiralCircleDirection: new Vector3D(0, 0, -10),
                                                                     circleRadius: circleRadius,
                                                                     segmentsPerCircle: 20,
                                                                     circles: spiralCircles);

                        var spiralPositions = spiralPositions2.Select(p => p.ToVector3()).ToArray();


                        //var spiralPositions = CreateSpiralPositions(startPosition: new Vector3(x * circleRadius * 3 + xStart, y * circleRadius * 3 + yStart, 0),
                        //                                            circleXDirection: new Vector3(1, 0, 0),
                        //                                            circleYDirection: new Vector3(0, 1, 0),
                        //                                            oneSpiralCircleDirection: new Vector3(0, 0, -10),
                        //                                            circleRadius: circleRadius,
                        //                                            segmentsPerCircle: 20,
                        //                                            circles: spiralCircles);

                        var tubePathMesh = CreateTubePathMesh(spiralPositions, radius: 2, segmentsCount: 8, isTubeClosed: true, tubeColor: Color4.White);

                        var meshObjectNode = new Ab3d.DirectX.MeshObjectNode(tubePathMesh, dxMaterial);

                        var tubePathVisual3D = new SceneNodeVisual3D(meshObjectNode);
                        //tubePathVisual3D.Transform = new TranslateTransform3D(x * circleRadius * 3 + xStart, y * circleRadius * 3 + yStart, 0);

                        _disposables.Add(meshObjectNode);

                        MainViewport.Children.Add(tubePathVisual3D);
                    }
                }

                MainViewport.EndInit();
            }
        }
        // Builds the demo scene: a single SimpleMesh that combines XCount x YCount x ZCount boxes
        // and is rendered with three different materials by splitting it into three SubMeshes.
        private void CreateScene()
        {
            // One box mesh that is replicated into the combined vertex / index buffers
            var boxGeometry = new Ab3d.Meshes.BoxMesh3D(new Point3D(0, 0, 0), new Size3D(BoxSize, BoxSize, BoxSize), 1, 1, 1).Geometry;

            _oneMeshTriangleIndicesCount = boxGeometry.TriangleIndices.Count;


            PositionNormalTexture[] combinedVertexBuffer;
            int[] combinedIndexBuffer;

            CreateMultiMeshBuffer(center: new Vector3(0, 0, 0),
                                  size: new Vector3(XCount * (BoxSize + BoxesMargin), YCount * (BoxSize + BoxesMargin), ZCount * (BoxSize + BoxesMargin)),
                                  xCount: XCount, yCount: YCount, zCount: ZCount,
                                  meshGeometry3D: boxGeometry,
                                  vertexBuffer: out combinedVertexBuffer,
                                  indexBuffer: out combinedIndexBuffer);

            _multiMaterialMesh = new SimpleMesh <PositionNormalTexture>(combinedVertexBuffer, combinedIndexBuffer,
                                                                        inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate);


            _indexBufferLength = combinedIndexBuffer.Length;

            // First split index is at 1/4 of the index buffer
            _firstColorIndex = (int)(_indexBufferLength / 4);

            // Second split index is at 3/4 of the index buffer
            _secondColorIndex = _firstColorIndex * 3;

            // Split the combined mesh into three index ranges; MaterialIndex selects the material for each range
            _multiMaterialMesh.SubMeshes = new SubMesh[]
            {
                new SubMesh("SubMesh1")
                {
                    MaterialIndex = 0, StartIndexLocation = 0, IndexCount = _firstColorIndex
                },
                new SubMesh("SubMesh2")
                {
                    MaterialIndex = 1, StartIndexLocation = _firstColorIndex, IndexCount = _secondColorIndex - _firstColorIndex
                },
                new SubMesh("SubMesh3")
                {
                    MaterialIndex = 2, StartIndexLocation = _secondColorIndex, IndexCount = _indexBufferLength - _secondColorIndex
                },
            };

            _disposables.Add(_multiMaterialMesh);


            // One material for each SubMesh (indexed by MaterialIndex)
            var subMeshMaterials = new Ab3d.DirectX.Material[]
            {
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.DimGray.ToColor3()
                },
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.Silver.ToColor3()
                },
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.Gold.ToColor3()
                },
            };

            _meshObjectNode = new Ab3d.DirectX.MeshObjectNode(_multiMaterialMesh, subMeshMaterials);

            _disposables.Add(_meshObjectNode);

            // Use SceneNodeVisual3D to show the SceneNode in the DXViewportView
            var multiMaterialVisual3D = new SceneNodeVisual3D(_meshObjectNode);

            MainViewport.Children.Add(multiMaterialVisual3D);
        }
        // Shows the specified GeometryModel3D with its triangles rendered as pixels of the specified size.
        // This is done by overriding the model's WPF material with a WpfMaterial that uses PixelEffect.
        private void ShowGeometryModel3D(GeometryModel3D model3D, float pixelSize)
        {
            MainViewport.Children.Clear();

            // Dispose resources that were created for the previously shown model
            // and start with a fresh DisposeList
            _disposables.Dispose();
            _disposables = new DisposeList();

            if (_pixelEffect == null)
            {
                // Get an instance of PixelEffect (it is used to provide the correct shaders to render specified positions as pixels).
                // The effect must be disposed when it is no longer used - this is done in the Unloaded event handler.
                _pixelEffect = MainDXViewportView.DXScene.DXDevice.EffectsManager.GetEffect <PixelEffect>(createNewEffectInstanceIfNotFound: true);
            }


            // The GeometryModel3D is rendered by overriding the standard effect
            // (the one that renders DiffuseMaterial and other standard materials)
            // with the PixelEffect so that different shaders render the provided triangles.
            //
            // Because the standard material does not provide a pixel size,
            // set the fallback pixel size value on the PixelEffect.
            _pixelEffect.PixelSize = pixelSize;


            // To override the used material: create a new WpfMaterial from the WPF material,
            // assign the PixelEffect to it ...
            var pixelRenderingMaterial = new WpfMaterial(model3D.Material);
            pixelRenderingMaterial.Effect = _pixelEffect;

            // ... and specify that this WpfMaterial is used whenever model3D.Material is used.
            model3D.Material.SetUsedDXMaterial(pixelRenderingMaterial);

            _disposables.Add(pixelRenderingMaterial);


            // Finally add the model3D to the MainViewport
            var modelContainerVisual3D = new ModelVisual3D();
            modelContainerVisual3D.Content = model3D;

            MainViewport.Children.Add(modelContainerVisual3D);


            // IMPORTANT:
            // Showing a 3D model with pixels this way is not optimal:
            // the pixels are rendered for each triangle, and because the same positions
            // are usually shared by multiple triangles, those positions are rendered multiple times.
            //
            // A better way is to extract the positions from the model's MeshGeometry3D
            // (applying model3D.Transform to each position when the transform is not identity),
            // read the pixel color from the DiffuseMaterial's SolidColorBrush (falling back to Red)
            // and then render the positions with:
            //
            //ShowPositionsArray(positionsArray, pixelSize, pixelColor, model3D.Bounds.ToDXEngineBounds());
            //
            // The code above is used here because it also demonstrates how to render a 3D model with a different effect.
        }
Esempio n. 18
0
        // Creates the sample scene: a sphere, a box and a teapot model (read from an obj file),
        // all rendered with a single shared PhysicallyBasedMaterial that gets its
        // base color, metalness, roughness, normal map and ambient occlusion map from the UI settings.
        private void CreateSampleScene()
        {
            _physicallyBasedMaterial = new PhysicallyBasedMaterial();

            // We need to dispose the PhysicallyBasedMaterial when this sample is unloaded
            _disposables.Add(_physicallyBasedMaterial);


            // Apply the current UI values to the material
            UpdateBaseColor();
            UpdateMetalness();
            UpdateRoughness();

            var normalMapShaderResourceView = GetNormalMapShaderResourceView();

            if (normalMapShaderResourceView != null)
            {
                _physicallyBasedMaterial.SetTextureMap(TextureMapTypes.NormalMap, normalMapShaderResourceView, "bricks_normal.png");
            }

            var ambientOcclusionShaderResourceView = AmbientOcclusionShaderResourceView();

            if (ambientOcclusionShaderResourceView != null)
            {
                _physicallyBasedMaterial.SetTextureMap(TextureMapTypes.AmbientOcclusion, ambientOcclusionShaderResourceView, "bricks_ao.png");
            }



            // The WPF material's brush color is not used for rendering - the material is rendered
            // with the _physicallyBasedMaterial that is set by SetUsedDXMaterial below
            var wpfMaterial = new DiffuseMaterial(Brushes.Red);

            wpfMaterial.SetUsedDXMaterial(_physicallyBasedMaterial);

            ModelPlaceholder.Content = null;
            ModelPlaceholder.Children.Clear();


            var sphereVisual3D = new Ab3d.Visuals.SphereVisual3D()
            {
                CenterPosition          = new Point3D(40, 12, 0),
                Radius                  = 12,
                Segments                = 50,
                Material                = wpfMaterial,
                FreezeMeshGeometry3D    = false, // the MeshGeometry3D must stay unfrozen and not shared so that the tangent vectors can be assigned to it below
                UseCachedMeshGeometry3D = false
            };

            ModelPlaceholder.Children.Add(sphereVisual3D);


            var boxVisual3D = new Ab3d.Visuals.BoxVisual3D()
            {
                CenterPosition          = new Point3D(-40, 10, 0),
                Size                    = new Size3D(20, 20, 20),
                Material                = wpfMaterial,
                FreezeMeshGeometry3D    = false, // see comment at sphereVisual3D above
                UseCachedMeshGeometry3D = false
            };

            ModelPlaceholder.Children.Add(boxVisual3D);


            // Read the teapot model from an obj file and center and scale it to the target position and size
            var readerObj = new Ab3d.ReaderObj();

            var readModel3D = (GeometryModel3D)readerObj.ReadModel3D(_modelsFolder + "teapot-hires.obj");

            Ab3d.Utilities.ModelUtils.CenterAndScaleModel3D(readModel3D, new Point3D(0, 10, 0), new Size3D(40, 40, 40), true);

            //// This code is called for each GeometryModel3D inside Plane1
            //var tangentVectors = Ab3d.DirectX.Utilities.MeshUtils.CalculateTangentVectors((MeshGeometry3D)readModel3D.Geometry);

            ////// Assign tangent array to the MeshGeometry3D
            //readModel3D.Geometry.SetDXAttribute(DXAttributeType.MeshTangentArray, tangentVectors);



            readModel3D.Material = wpfMaterial;

            //ModelPlaceholder.Content = null;
            ModelPlaceholder.Children.Add(readModel3D.CreateModelVisual3D());



            // Rendering normal (bump) maps require tangent vectors.
            // The following code will generate tangent vectors and assign them to the MeshGeometry3D that form our 3D model.
            // If tangent vectors are not provided, they will be calculated on-demand in the pixel shader (slightly reducing performance).

            Ab3d.Utilities.ModelIterator.IterateGeometryModel3DObjects(ModelPlaceholder, delegate(GeometryModel3D geometryModel3D, Transform3D transform3D)
            {
                // This code is called for each GeometryModel3D inside ModelPlaceholder
                var tangentVectors = Ab3d.DirectX.Utilities.MeshUtils.CalculateTangentVectors((MeshGeometry3D)geometryModel3D.Geometry);

                // Assign tangent array to the MeshGeometry3D
                geometryModel3D.Geometry.SetDXAttribute(DXAttributeType.MeshTangentArray, tangentVectors);
            });


            Camera1.Distance = 150;

            UpdateLights();
        }
Esempio n. 19
0
        private void SetupOutlineRenderingStep()
        {
            _solidColorEffectWithOutline = new SolidColorEffect();

            _solidColorEffectWithOutline.Color = Colors.Orange.ToColor4();
            _solidColorEffectWithOutline.OverrideModelColor = true;
            _solidColorEffectWithOutline.OutlineThickness   = 0;
            _solidColorEffectWithOutline.WriteMaxDepthValue = false;

            _solidColorEffectWithOutline.InitializeResources(MainDXViewportView.DXScene.DXDevice);

            _disposables.Add(_solidColorEffectWithOutline);


            _backgroundColor = new Color4(1, 1, 1, 0);


            var stencilSetToOneDescription = new DepthStencilStateDescription()
            {
                IsDepthEnabled = true,
                DepthWriteMask = DepthWriteMask.All,

                DepthComparison  = Comparison.LessEqual,
                IsStencilEnabled = true,
                StencilReadMask  = 0xFF,
                StencilWriteMask = 0xFF,
                FrontFace        =
                {
                    Comparison         = Comparison.Always,
                    DepthFailOperation = StencilOperation.Keep,
                    FailOperation      = StencilOperation.Keep,
                    PassOperation      = StencilOperation.Replace // The value that is set as reference is set in the ContextStateManger when _deviceContext.OutputMerger.SetDepthStencilState is called - the value is set to 1.
                }
            };

            stencilSetToOneDescription.BackFace = stencilSetToOneDescription.FrontFace;

            _stencilSetToOneDepthStencilState = new DepthStencilState(MainDXViewportView.DXScene.Device, stencilSetToOneDescription);

            if (MainDXViewportView.DXScene.DXDevice.IsDebugDevice)
            {
                _stencilSetToOneDepthStencilState.DebugName = "StencilSetToOneDepthStencilState";
            }

            _disposables.Add(_stencilSetToOneDepthStencilState);


            var renderWhenStencilIsNotOneDescription = new DepthStencilStateDescription()
            {
                IsDepthEnabled = true,
                DepthWriteMask = DepthWriteMask.All,

                DepthComparison  = Comparison.LessEqual,
                IsStencilEnabled = true,
                StencilReadMask  = 0xFF,
                StencilWriteMask = 0xFF,
                FrontFace        =
                {
                    Comparison         = Comparison.Greater, // render only when 1 is greater then stencil value
                    DepthFailOperation = StencilOperation.Keep,
                    FailOperation      = StencilOperation.Keep,
                    PassOperation      = StencilOperation.Keep
                }
            };

            renderWhenStencilIsNotOneDescription.BackFace = stencilSetToOneDescription.FrontFace;

            _renderWhenStencilIsNotOneState = new DepthStencilState(MainDXViewportView.DXScene.Device, renderWhenStencilIsNotOneDescription);

            if (MainDXViewportView.DXScene.DXDevice.IsDebugDevice)
            {
                _renderWhenStencilIsNotOneState.DebugName = "RenderWhenStencilIsNotOneState";
            }

            _disposables.Add(_renderWhenStencilIsNotOneState);


            var renderingStepsGroup = new RenderingStepsGroup("Render outline group", MainDXViewportView.DXScene.RenderingSteps);

            renderingStepsGroup.BeforeRunningStep += (object sender, DirectX.RenderingEventArgs args) =>
            {
                var renderingContext = args.RenderingContext;

                // Set new back buffer where we will render outline objects
                SetupOutlineBackBuffers(renderingContext);

                // Set new DepthStencilState that will also set stencil value to 1 for each rendered pixel
                renderingContext.ContextStatesManager.DepthStencilState = _stencilSetToOneDepthStencilState;

                _addOutlineRenderingStep.SourceTexture = _outlineShaderResourceView;
            };

            renderingStepsGroup.AfterRunningStep += (object sender, DirectX.RenderingEventArgs args) =>
            {
                var renderingContext = args.RenderingContext;

                // Reset the saved back buffer
                RestoreBackBuffers(renderingContext);
            };


            // The first step in rendering outlines is to render selected objects with the selected solid color.
            // This is done with creating a custom RenderObjectsRenderingStep
            // and overriding the OverrideEffect to use SolidColorEffect and _stencilSetToOneDepthStencilState.
            // We also render only the selected objects - this is done with using FilterObjectsFunction.
            var renderOutlinesObjectsRenderingStep = new RenderObjectsRenderingStep("Render outlined objects")
            {
                OverrideEffect            = _solidColorEffectWithOutline,
                OverrideDepthStencilState = _stencilSetToOneDepthStencilState,

                FilterObjectsFunction = delegate(RenderablePrimitiveBase objectToRender)
                {
                    // IMPORTANT:
                    // This delegate is highly performance critical because it is called for each object in each frame.
                    // Therefore do not access any WPF's DependencyProperties there.

                    var wpfGeometryModel3DNode = objectToRender.OriginalObject as WpfGeometryModel3DNode;

                    if (wpfGeometryModel3DNode == null)
                    {
                        return(false);
                    }

                    // NOTE:
                    // Here we do a simple check for object name in a List<string>.
                    // If you have a lot of selected objects, use HashSet<string> instead.
                    //
                    // The reason why the check is done by the object name is that this way
                    // we can simply connect the WPF object (that is selected) and the SceneNode object that is created from the WPF object.
                    // So if we define the name for the WPF objects, then the SceneNode objects that are created from them will also have the same name.
                    //
                    // But if it is not possible to name WPF objects or if you have duplicate names,
                    // then you will need to store SceneNode objects in HashSet instead of object names.
                    //
                    // To use this we need to get the SceneNode instances that are created from WPF objects.
                    // One option is to call MainDXViewportView.GetSceneNodeForWpfObject(wpfObject) for each WPF object (this must be called after the SceneNodes are initialized - for example in DXSceneInitialized).
                    // Another option is to wait until SceneNodes are created from WPF objects (in DXSceneInitialized event handler or if the scene was already created after calling MainDXViewportView.Update()).
                    // Then go through all SceneNodes in the hierarchy (you can use MainDXViewportView.DXScene.RootNode.ForEachChildNode method)
                    // and for each WpfGeometryModel3DNode gets its GeometryModel3D and build a Dictionary with WPF (GeometryModel3D) and SceneNode (WpfGeometryModel3DNode) objects.
                    // Then when you select for which WPF objects you want to draw outline, create a HashSet<WpfGeometryModel3DNode> with list of WpfGeometryModel3DNode objects.

                    return(_selectedObjectNames.Contains(wpfGeometryModel3DNode.Name));
                }
            };

            renderingStepsGroup.Children.Add(renderOutlinesObjectsRenderingStep);


            // Add two ExpandPostProcess that will make the outline bigger
            int outlineWidth = (int)OutlineSizeSlider.Value;

            _horizontalExpandPostProcess = new Ab3d.DirectX.PostProcessing.ExpandPostProcess(isVerticalRenderingPass: false, expansionWidth: outlineWidth, backgroundColor: _backgroundColor);
            _verticalExpandPostProcess   = new Ab3d.DirectX.PostProcessing.ExpandPostProcess(isVerticalRenderingPass: true, expansionWidth: outlineWidth, backgroundColor: _backgroundColor);

            _disposables.Add(_horizontalExpandPostProcess);
            _disposables.Add(_verticalExpandPostProcess);

            var expandPostProcesses = new List <PostProcessBase>();

            expandPostProcesses.Add(_horizontalExpandPostProcess);
            expandPostProcesses.Add(_verticalExpandPostProcess);


            // We could also blur the outline to make it bigger, but Expand creates better results
            //var horizontalBlurPostProcess = new Ab3d.DirectX.PostProcessing.SimpleBlurPostProcess(isVerticalBlur: false, filterWidth: 5);
            //var verticalBlurPostProcess   = new Ab3d.DirectX.PostProcessing.SimpleBlurPostProcess(isVerticalBlur: true, filterWidth: 5);
            //horizontalBlurPostProcess.InitializeResources(MainDXViewportView.DXScene.DXDevice);
            //verticalBlurPostProcess.InitializeResources(MainDXViewportView.DXScene.DXDevice);

            //blurPostProcesses.Add(horizontalBlurPostProcess);
            //blurPostProcesses.Add(verticalBlurPostProcess);


            _expandObjectsPostProcessesRenderingSteps = new RenderPostProcessingRenderingStep("Expand objects rendering step", expandPostProcesses);
            renderingStepsGroup.Children.Add(_expandObjectsPostProcessesRenderingSteps);


            MainDXViewportView.DXScene.RenderingSteps.AddAfter(MainDXViewportView.DXScene.DefaultInitializeRenderingStep, renderingStepsGroup);


            _addOutlineRenderingStep = new RenderTextureRenderingStep(RenderTextureRenderingStep.TextureChannelsCount.FourChannels, "Render outline over 3D scene")
            {
                Offsets                 = new Vector4(0, 0, 0, 0),                                                     // preserve original colors
                Factors                 = new Vector4(1, 1, 1, 1),
                TargetViewport          = new ViewportF(0, 0, 1f, 1f),                                                 // render to full screen
                CustomBlendState        = MainDXViewportView.DXScene.DXDevice.CommonStates.NonPremultipliedAlphaBlend, // alpha blend
                CustomDepthStencilState = _renderWhenStencilIsNotOneState,                                             // only render when stencil value is less then 1 (not where the objects are rendered)
            };

            _addOutlineRenderingStep.BeforeRunningStep += delegate(object sender, DirectX.RenderingEventArgs args)
            {
                var renderingContext = args.RenderingContext;

                renderingContext.SetBackBuffer(renderingContext.CurrentBackBuffer, renderingContext.CurrentBackBufferDescription, renderingContext.CurrentRenderTargetView, _outlineDepthStencilView, false);
                //renderingContext.DeviceContext.OutputMerger.SetTargets(_outlineDepthStencilView, renderingContext.CurrentRenderTargetView);
            };

            MainDXViewportView.DXScene.RenderingSteps.AddBefore(MainDXViewportView.DXScene.DefaultCompleteRenderingStep, _addOutlineRenderingStep);
        }
        // Builds the test scene: a SphereVisual3D, an imported teapot model,
        // an InstancedMeshGeometryVisual3D with boxes and a pyramid rendered with a low-level MeshObjectNode.
        private void CreateTestModels()
        {
            _rootModelVisual3D = new ModelVisual3D();
            MainViewport3D.Children.Add(_rootModelVisual3D);


            // 1) Standard Ab3d.PowerToys SphereVisual3D
            _sphereVisual3D = new Ab3d.Visuals.SphereVisual3D()
            {
                CenterPosition = new Point3D(-50, 0, -50),
                Radius         = 30,
                Material       = new DiffuseMaterial(Brushes.Silver)
            };

            _sphereVisual3D.SetName("SphereVisual3D");

            _rootModelVisual3D.Children.Add(_sphereVisual3D);


            // 2) Teapot model read from an obj file, then centered and scaled to a known position and size
            var objReader   = new ReaderObj();
            var teapotModel = objReader.ReadModel3D(System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources\Models\teapot-hires.obj"));

            Ab3d.Utilities.ModelUtils.CenterAndScaleModel3D(teapotModel, centerPosition: new Point3D(50, 0, -50), finalSize: new Size3D(80, 80, 80), preserveAspectRatio: true);

            _teapotModelVisual3D = new ModelVisual3D()
            {
                Content = teapotModel
            };

            teapotModel.SetName("teapot Model3D");
            _teapotModelVisual3D.SetName("teapot ModelVisual3D");

            _rootModelVisual3D.Children.Add(_teapotModelVisual3D);


            // 3) InstancedMeshGeometryVisual3D - renders many copies of one box mesh with per-instance data
            var boxMesh = new Ab3d.Meshes.BoxMesh3D(new Point3D(0, 0, 0), new Size3D(6, 6, 6), 1, 1, 1);

            InstanceData[] instancesData = DXEnginePerformance.InstancedMeshGeometry3DTest.CreateInstancesData(center: new Point3D(-50, 0, 50),
                                                                                                               size: new Size3D(80, 50, 80),
                                                                                                               modelScaleFactor: 1,
                                                                                                               xCount: 5,
                                                                                                               yCount: 1,
                                                                                                               zCount: 5,
                                                                                                               useTransparency: false);

            _instancedMeshGeometryVisual3D = new InstancedMeshGeometryVisual3D(boxMesh.Geometry);
            _instancedMeshGeometryVisual3D.InstancesData = instancesData;

            _instancedMeshGeometryVisual3D.SetName("InstancedMeshGeometryVisual3D");
            _rootModelVisual3D.Children.Add(_instancedMeshGeometryVisual3D);


            // 4) Low-level DXEngine objects: a MeshObjectNode shown in the Viewport3D through a SceneNodeVisual3D
            var pyramidGeometry = new Ab3d.Meshes.PyramidMesh3D(new Point3D(50, -20, 50), new Size3D(80, 40, 80)).Geometry;
            var dxPyramidMesh   = new Ab3d.DirectX.Models.DXMeshGeometry3D(pyramidGeometry);

            var goldMaterial = new StandardMaterial()
            {
                DiffuseColor = Colors.Gold.ToColor3()
            };

            _pyramidMeshObjectNode = new Ab3d.DirectX.MeshObjectNode(dxPyramidMesh, goldMaterial);

            // DXEngine resources are not managed by WPF and need to be disposed manually
            _disposables.Add(dxPyramidMesh);
            _disposables.Add(_pyramidMeshObjectNode);

            var pyramidVisual3D = new SceneNodeVisual3D(_pyramidMeshObjectNode);

            pyramidVisual3D.SetName("SceneNodeVisual3D");
            _rootModelVisual3D.Children.Add(pyramidVisual3D);
        }
// Esempio n. 21
// 0
        // Creates a bordered Viewport3D with its own TargetPositionCamera, fills it with the test scene
        // and adds it to the specified cell of parentGrid. When createDXEngine is true the Viewport3D is
        // wrapped into a DXViewportView (rendered by DXEngine); otherwise plain WPF 3D rendering is used.
        // An optional title is shown in the top-left corner of the cell.
        private void AddNewDXViewportView(Grid parentGrid, int rowIndex, int columnIndex, double sphereRadius, double depthBias, bool createDXEngine, string title,
                                          out Border createdBorder, out Viewport3D createdViewport3D, out DXViewportView createdDXViewportView, out TargetPositionCamera createdTargetPositionCamera)
        {
            createdBorder = new Border()
            {
                BorderBrush         = Brushes.Black,
                BorderThickness     = new Thickness(1, 1, 1, 1),
                SnapsToDevicePixels = true
            };

            createdViewport3D = new Viewport3D();

            createdTargetPositionCamera = new TargetPositionCamera()
            {
                Heading          = 30,
                Attitude         = -20,
                Distance         = 200,
                TargetPosition   = new Point3D(0, 0, 0),
                ShowCameraLight  = ShowCameraLightType.Always,
                TargetViewport3D = createdViewport3D
            };

            bool isSphereShown = ShowSphereRadioButton.IsChecked ?? false;

            CreateScene(createdViewport3D, sphereRadius, depthBias, createdTargetPositionCamera, isSphereShown);

            if (!createDXEngine)
            {
                // Show the plain WPF 3D Viewport3D
                createdDXViewportView = null;
                createdBorder.Child   = createdViewport3D;
            }
            else
            {
                // Wrap the Viewport3D into a DXViewportView so the scene is rendered by DXEngine
                createdDXViewportView = new DXViewportView(createdViewport3D)
                {
                    SnapsToDevicePixels = true
                };

                createdBorder.Child = createdDXViewportView;

                _disposables.Add(createdDXViewportView);
            }

            Grid.SetRow(createdBorder, rowIndex);
            Grid.SetColumn(createdBorder, columnIndex);

            parentGrid.Children.Add(createdBorder);
            parentGrid.Children.Add(createdTargetPositionCamera);

            if (!string.IsNullOrEmpty(title))
            {
                var titleTextBlock = new TextBlock()
                {
                    Text                = title,
                    Foreground          = Brushes.Black,
                    FontSize            = 12,
                    Margin              = new Thickness(10, 5, 5, 5),
                    HorizontalAlignment = HorizontalAlignment.Left,
                    VerticalAlignment   = VerticalAlignment.Top
                };

                Grid.SetRow(titleTextBlock, rowIndex);
                Grid.SetColumn(titleTextBlock, columnIndex);

                parentGrid.Children.Add(titleTextBlock);
            }
        }
// Esempio n. 22
// 0
        // Initializes a 3D point-cloud from the specified positions and per-position colors and adds it to MainViewport.
        // When useOptimizedPointMesh is true an OptimizedPointMesh is used (can reduce the number of rendered positions
        // based on the current view); otherwise a SimpleMesh is used and all positions are always rendered.
        // disableDepthRead / disableDepthWrite control the z-buffer usage of the created PixelMaterial.
        private void InitializePointCloud(Vector3[] positions, BoundingBox positionsBounds, Color4[] positionColors, bool useOptimizedPointMesh, bool disableDepthRead, bool disableDepthWrite)
        {
            if (MainDXViewportView.DXScene == null)
            {
                return; // If this happens, then this method is called too soon (before DXEngine is initialized) or we are using WPF 3D
            }
            // First, set up the material:

            // Create a new PixelMaterial
            _pixelMaterial = new PixelMaterial()
            {
                PixelColor  = Color4.White, // When using PixelColors, PixelColor is used as a mask (multiplied with each color)
                PixelSize   = 2,
                PixelColors = positionColors,

                // By default graphics card renders objects that are closer to the camera over the objects that are farther away from the camera.
                // This means that positions that are closer to the camera will be rendered over the positions that are farther away.
                // This may distort the shown colors.
                // Therefore when using pixel colors it is better to disable depth buffer checking and render all the pixels.
                // This is done with setting ReadZBuffer and WriteZBuffer to false.
                ReadZBuffer  = !disableDepthRead,
                WriteZBuffer = !disableDepthWrite
            };

            // Initialize the DirectX resources of the material before it is used
            _pixelMaterial.InitializeResources(MainDXViewportView.DXScene.DXDevice);

            _disposables.Add(_pixelMaterial);


            // Now set up the mesh and create SceneNode to show it

            if (useOptimizedPointMesh)
            {
                _optimizedPointMesh = new OptimizedPointMesh <Vector3>(positions,
                                                                       positionsBounds,
                                                                       segmentsCount: 100);

                // NOTE that you can also use OptimizedPointMesh that takes more complex vertex struct for example PositionColor or PositionNormal. In this case use the other constructor.

                _optimizedPointMesh.OptimizationIndicesNumberThreshold = 100000; // We are satisfied with reducing the number of shown positions to 100000 (no need to optimize further - higher number reduced the initialization time)
                _optimizedPointMesh.MaxOptimizationViewsCount          = 10;     // Maximum number of created data sub-sets. The actual number can be lower when we hit the OptimizationIndicesNumberThreshold or when all vertices needs to be shown.

                // Prepare the optimized data sub-sets for the current view size, then create the DirectX resources
                _optimizedPointMesh.Optimize(new SharpDX.Size2(MainDXViewportView.DXScene.Width, MainDXViewportView.DXScene.Height), standardPointSize: 1);

                _optimizedPointMesh.InitializeResources(MainDXViewportView.DXScene.DXDevice);

                _disposables.Add(_optimizedPointMesh);


                // To render OptimizedPointMesh we need to use CustomRenderableNode that provides custom rendering callback action.
                var customRenderableNode = new CustomRenderableNode(RenderAction, _optimizedPointMesh.Bounds, _optimizedPointMesh, _pixelMaterial);
                customRenderableNode.Name = "CustomRenderableNode";
                //customRenderableNode.CustomRenderingQueue = MainDXViewportView.DXScene.BackgroundRenderingQueue;

                _disposables.Add(customRenderableNode);

                var sceneNodeVisual3D = new SceneNodeVisual3D(customRenderableNode);
                //sceneNodeVisual3D.Transform = transform;

                MainViewport.Children.Add(sceneNodeVisual3D);
            }
            else
            {
                // Use SimpleMesh - all positions will be always rendered:

                var simpleMesh = new SimpleMesh <Vector3>(vertexBufferArray: positions,
                                                          indexBufferArray: null,
                                                          inputLayoutType: InputLayoutType.Position);

                simpleMesh.PrimitiveTopology = PrimitiveTopology.PointList; // We need to change the default PrimitiveTopology.TriangleList to PointList

                // To correctly set the Camera's Near and Far distance, we need to provide the correct bounds of each shown 3D model.

                // It is highly recommended to manually set the Bounds.
                simpleMesh.Bounds = new Bounds(positionsBounds);

                // if we do not manually set the Bounds, then we need to call CalculateBounds to calculate the bounds
                //simpleMesh.CalculateBounds();

                // We will need to dispose the SimpleMesh
                _disposables.Add(simpleMesh);


                // Now create a new MeshObjectNode
                _meshObjectNode = new Ab3d.DirectX.MeshObjectNode(simpleMesh, _pixelMaterial);

                _disposables.Add(_meshObjectNode);

                // To be able to add the MeshObjectNode (or any other SceneNode) to WPF's Viewport3D,
                // we need to create a SceneNodeVisual3D
                var sceneNodeVisual3D = new SceneNodeVisual3D(_meshObjectNode);

                MainViewport.Children.Add(sceneNodeVisual3D);
            }

            // Adjust the camera so that the whole point-cloud is visible
            Camera1.TargetPosition = positionsBounds.Center.ToWpfPoint3D();
            Camera1.Distance       = positionsBounds.ToRect3D().GetDiagonalLength();
        }
        // Constructor: sets up the PlanarShadowRenderingProvider (with textured shadow plane and shadow settings),
        // creates the lights, loads the 3D model and wires up keyboard handling and resource disposal.
        public PlanarShadowsCustomization()
        {
            InitializeComponent();

            _disposables = new DisposeList();

            // DXEngine resources can only be created after the DXScene has been initialized
            MainDXViewportView.DXSceneInitialized += delegate(object sender, EventArgs args)
            {
                if (MainDXViewportView.DXScene != null)
                {
                    // Load texture file into ShaderResourceView (in our case we load dds file; but we could also load png file)
                    string textureFileName          = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources/GrassTexture.jpg");
                    var    loadedShaderResourceView = Ab3d.DirectX.TextureLoader.LoadShaderResourceView(MainDXViewportView.DXScene.DXDevice.Device, textureFileName);

                    _disposables.Add(loadedShaderResourceView);

                    // Define DXEngine's materials for shadow plane
                    _shadowPlaneMaterial = new StandardMaterial()
                    {
                        DiffuseColor    = Color3.White, // When DiffuseTextures are set, then DiffuseColor is used as a color filter (White means no filter)
                        DiffuseTextures = new ShaderResourceView[] { loadedShaderResourceView }
                    };

                    _shadowPlaneBackMaterial = new StandardMaterial()
                    {
                        DiffuseColor = Colors.DimGray.ToColor3()
                    };

                    _disposables.Add(_shadowPlaneMaterial);
                    _disposables.Add(_shadowPlaneBackMaterial);


                    // Define the PlanarShadowRenderingProvider
                    _planarShadowRenderingProvider = new PlanarShadowRenderingProvider()
                    {
                        // We need to provide information about the position of the plane in 3D space
                        ShadowPlaneCenterPosition  = new Vector3(0, 0, 0),
                        ShadowPlaneSize            = new Vector2(400, 400),
                        ShadowPlaneNormalVector    = new Vector3(0, 1, 0),
                        ShadowPlaneHeightDirection = new Vector3(0, 0, -1),

                        // In case ShadowPlaneMaterial and/or ShadowPlaneBackMaterial are defined
                        // the PlanarShadowRenderingProvider will also render the 3D plane.
                        ShadowPlaneMaterial     = _shadowPlaneMaterial,
                        ShadowPlaneBackMaterial = _shadowPlaneBackMaterial,

                        // Set shadow properties
                        ShadowColor        = Color3.Black,
                        ShadowTransparency = (float)ShadowTransparencySlider.Value / 100.0f, // default value is 0.65f

                        // Because shadow is rendered as standard 3D object, we need to offset it from the shadow plane
                        // to prevent z-fighting problems that occur when two 3D objects are rendered to the same 3D position.
                        // This value needs to be very small so that it is not seen that the shadow is above the plane.
                        // Default value is 0.01f.
                        ShadowOffsetFromPlane = 0.01f,

                        // When using PlanarShadowRenderingProvider we do not need PlanarShadowMeshCreator from Ab3d.PowerToys
                        // to prepare a special MeshGeometry3D for us. Also PlanarShadowMeshCreator does not need to manually (on the CPU)
                        // cut the shadow to the plane bounds but this can be done with using hardware accelerated algorithm (using stencil buffer).
                        // But if we still want to use PlanarShadowMeshCreator we can set the following two properties to false
                        // (for example if we wanted to use PlanarShadowRenderingProvider just to provide proper transparent shadows).
                        ApplyShadowMatrix      = true,
                        CutShadowToPlaneBounds = true,

                        IsCheckingIsCastingShadow = false, // Initially do not check for IsCastingShadow values (this is also a default value). See comments in LoadModel for more info.

                        //CustomShadowLight = new Ab3d.DirectX.Lights.DirectionalLight(new Vector3(0, -1, 1))
                        //CustomShadowLight = new Ab3d.DirectX.Lights.PointLight(new Vector3(0, 500, 0), 300)
                    };

                    _disposables.Add(_planarShadowRenderingProvider);


                    MainDXViewportView.DXScene.InitializeShadowRendering(_planarShadowRenderingProvider);
                }

                // Set up the initial light position (spherical coordinates around the scene)
                _lightHorizontalAngle = 30;
                _lightVerticalAngle   = 27;
                _lightDistance        = 500;

                _ambientLight = new AmbientLight(System.Windows.Media.Color.FromRgb(40, 40, 40));

                _shadowPointLight       = new PointLight();
                _shadowDirectionalLight = new DirectionalLight();

                Camera1.ShowCameraLight = ShowCameraLightType.Never; // prevent adding camera's light

                SetShadowLight(isDirectionalLight: true);

                UpdateLights();

                _loadedModel3D = LoadModel3D();
                MainViewport.Children.Add(_loadedModel3D.CreateModelVisual3D());
            };



            this.PreviewKeyDown += OnPreviewKeyDown;

            // This will allow receiving keyboard events
            this.Focusable = true;
            this.Focus();

            // Dispose all created DXEngine resources when this sample is unloaded
            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                _disposables.Dispose();
                MainDXViewportView.Dispose();
            };
        }
// Esempio n. 24
// 0
        // Called when the DXScene's Device has been created.
        // Prepares everything that is needed to render an object-id bitmap:
        // disables material sorting, registers a SolidColorEffect and adds a custom (initially disabled) rendering step.
        private void OnDxSceneDeviceCreated(object sender, EventArgs e)
        {
            // IMPORTANT:
            // Some rendering queues sort objects by material. This improves performance because objects
            // with similar materials are rendered one after another, which reduces the number of DirectX
            // state changes. But when an object-id map is rendered, the objects must be rendered in exactly
            // the same order as in the standard rendering pass - otherwise an id that is read back from the
            // bitmap could not be mapped to the original object. Therefore sorting is disabled on all
            // MaterialSortedRenderingQueue instances.
            //
            // Sorting by camera distance (TransparentRenderingQueue) can stay enabled because that
            // object order does not change when the object-id map is rendered.
            foreach (var sortedQueue in MainDXViewportView.DXScene.RenderingQueues.OfType <MaterialSortedRenderingQueue>())
            {
                sortedQueue.IsSortingEnabled = false;
            }


            // SolidColorEffect renders each object with a single color - the color will encode the object's id.
            _solidColorEffect = new SolidColorEffect();

            // Replace the object's own color with the color specified in SolidColorEffect.Color
            _solidColorEffect.OverrideModelColor = true;

            // Force the Opaque blend state even when alpha is less than 1 (usually PremultipliedAlphaBlend would be used).
            // This allows the alpha component to carry additional id data (in our case the RenderingQueue id).
            _solidColorEffect.OverrideBlendState = MainDXViewportView.DXScene.DXDevice.CommonStates.Opaque;

            // By default, color components are multiplied with the alpha value when alpha is less than 1.
            // Disable that for the same reason - alpha must stay usable as id data.
            _solidColorEffect.PremultiplyAlphaColors = false;

            _disposables.Add(_solidColorEffect);

            MainDXViewportView.DXScene.DXDevice.EffectsManager.RegisterEffect(_solidColorEffect);


            // Create a custom rendering step that will be used instead of the standard rendering step.
            // It is used in the CreateObjectIdBitmapButton_OnClick method below.
            // IMPORTANT: the step is disabled here - it is enabled only while rendering to the bitmap.
            _objectIdRenderingStep = new CustomActionRenderingStep("ObjectIdRenderingStep")
            {
                CustomAction = ObjectIdRenderingAction,
                IsEnabled    = false
            };

            MainDXViewportView.DXScene.RenderingSteps.AddAfter(MainDXViewportView.DXScene.DefaultRenderObjectsRenderingStep, _objectIdRenderingStep);

            // In this sample the object ids are rendered to a custom bitmap only on demand.
            // To always render object ids instead of the standard scene, you could instead use:
            //
            //_objectIdRenderingStep.IsEnabled = true;
            //MainDXViewportView.DXScene.DefaultRenderObjectsRenderingStep.IsEnabled = false;
        }
        // Called when the DXScene's Device has been created.
        // Loads the diffuse, normal and specular textures, sets up a MultiMapMaterial that uses them,
        // assigns the material to Plane1 and generates tangent vectors that are required for normal mapping.
        private void OnDXSceneDeviceCreated(object sender, EventArgs eventArgs)
        {
            var d3dDevice = MainDXViewportView.DXScene.Device;

            string textureBaseFolder       = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources\BricksMaps\");
            string diffuseTextureFilePath  = textureBaseFolder + "bricks.png";
            string normalTextureFilePath   = textureBaseFolder + "bricks_normal.png";
            string specularTextureFilePath = textureBaseFolder + "bricks_specular.png";

            // textureInfo (filled from the diffuse texture) is used below to choose the correct blend state
            TextureInfo textureInfo;

            _diffuseShaderResourceView  = Ab3d.DirectX.TextureLoader.LoadShaderResourceView(d3dDevice, diffuseTextureFilePath, out textureInfo);
            _normalShaderResourceView   = Ab3d.DirectX.TextureLoader.LoadShaderResourceView(d3dDevice, normalTextureFilePath, loadDdsIfPresent: false, convertTo32bppPRGBA: false);
            _specularShaderResourceView = Ab3d.DirectX.TextureLoader.LoadShaderResourceView(d3dDevice, specularTextureFilePath, loadDdsIfPresent: false, convertTo32bppPRGBA: false);

            _disposables.Add(_diffuseShaderResourceView);
            _disposables.Add(_normalShaderResourceView);
            _disposables.Add(_specularShaderResourceView);


            _multiMapMaterial = new MultiMapMaterial();

            _disposables.Add(_multiMapMaterial);

            // When using Diffuse texture, the DiffuseColor is used as color mask - colors from diffuse texture are multiplied with DiffuseColor
            _multiMapMaterial.DiffuseColor = Colors.White.ToColor3();

            // Set specular power and specular color mask
            _multiMapMaterial.SpecularPower = 64;
            _multiMapMaterial.SpecularColor = Colors.White.ToColor3();

            _multiMapMaterial.HasTransparency = textureInfo.HasTransparency;

            // Get recommended BlendState based on HasTransparency and HasPreMultipliedAlpha values.
            // Possible values are: CommonStates.Opaque, CommonStates.PremultipliedAlphaBlend or CommonStates.NonPremultipliedAlphaBlend.
            _multiMapMaterial.BlendState = MainDXViewportView.DXScene.DXDevice.CommonStates.GetRecommendedBlendState(textureInfo.HasTransparency, textureInfo.HasPremultipliedAlpha);

            // We could manually set texture maps, but this will be done in the UpdateSelectedMaps method that is called below
            //_multiMapMaterial.TextureMaps.Add(new TextureMapInfo(TextureMapTypes.DiffuseColor, _diffuseShaderResourceView, null, diffuseTextureFilePath));
            //_multiMapMaterial.TextureMaps.Add(new TextureMapInfo(TextureMapTypes.NormalMap, _normalShaderResourceView, null, normalTextureFilePath));
            //_multiMapMaterial.TextureMaps.Add(new TextureMapInfo(TextureMapTypes.SpecularColor, _specularShaderResourceView, null, specularTextureFilePath));


            UpdateSelectedMaps();


            // MultiMapMaterial also supports rendering environment map.
            // The following commented code shows how to specify it:

            //string packUriPrefix = string.Format("pack://application:,,,/{0};component/Resources/SkyboxTextures/", this.GetType().Assembly.GetName().Name);

            //// Create DXCubeMap with specifying 6 bitmaps for all sides of the cube
            //var dxCubeMap = new DXCubeMap(packUriPrefix,
            //    "CloudyLightRaysRight512.png",
            //    "CloudyLightRaysLeft512.png",
            //    "CloudyLightRaysUp512.png",
            //    "CloudyLightRaysDown512.png",
            //    "CloudyLightRaysFront512.png",
            //    "CloudyLightRaysBack512.png",
            //    MainDXViewportView.DXScene.DXDevice);

            //// To show the environment map correctly for our bitmaps we need to flip bottom bitmap horizontally and vertically
            //dxCubeMap.FlipBitmaps(flipRightBitmapType: DXCubeMap.FlipBitmapType.None,
            //    flipLeftBitmapType: DXCubeMap.FlipBitmapType.None,
            //    flipUpBitmapType: DXCubeMap.FlipBitmapType.None,
            //    flipDownBitmapType: DXCubeMap.FlipBitmapType.FlipXY,
            //    flipFrontBitmapType: DXCubeMap.FlipBitmapType.None,
            //    flipBackBitmapType: DXCubeMap.FlipBitmapType.None);

            //_disposables.Add(dxCubeMap);

            //multiMapMaterial.TextureMaps.Add(new TextureMapInfo(TextureMapTypes.ReflectionMap, specularShaderResourceView, null));
            //multiMapMaterial.TextureMaps.Add(new TextureMapInfo(TextureMapTypes.EnvironmentCubeMap, dxCubeMap.ShaderResourceView, null));


            // Use SetUsedDXMaterial to specify _multiMapMaterial to be used instead of the WPF material specified for the Plane1
            Plane1.Material.SetUsedDXMaterial(_multiMapMaterial);


            // Rendering normal (bump) maps require tangent vectors.
            // The following code will generate tangent vectors and assign them to the MeshGeometry3D that form our 3D model.
            // If tangent vectors are not provided, they will be calculated on-demand in the pixel shader (slightly reducing performance).

            Ab3d.Utilities.ModelIterator.IterateGeometryModel3DObjects(Plane1, delegate(GeometryModel3D geometryModel3D, Transform3D transform3D)
            {
                // This code is called for each GeometryModel3D inside Plane1
                var tangentVectors = Ab3d.DirectX.Utilities.MeshUtils.CalculateTangentVectors((MeshGeometry3D)geometryModel3D.Geometry);

                // Assign tangent array to the MeshGeometry3D
                geometryModel3D.Geometry.SetDXAttribute(DXAttributeType.MeshTangentArray, tangentVectors);
            });
        }
// ==== Esempio n. 26 (Example no. 26) ====
        private void CreateScene()
        {
            // Grid layout: 40 x 1 x 40 spheres are combined into one big mesh.
            int spheresX = 40;
            int spheresY = 1;
            int spheresZ = 40;

            float radius = 10;
            float margin = 10;

            // One sphere mesh that is replicated into every grid cell.
            var sphereGeometry = new Ab3d.Meshes.SphereMesh3D(new Point3D(0, 0, 0), radius, 10).Geometry;

            // Remember how many triangle indices a single sphere uses so that a hit
            // triangle index can later be mapped back to the sphere it belongs to.
            _oneMeshTriangleIndicesCount = sphereGeometry.TriangleIndices.Count;


            PositionNormalTexture[] vertexBuffer;
            int[] indexBuffer;

            // Total grid size (one cell per sphere; cell size = radius + margin).
            float cellSize = radius + margin;
            var gridSize = new Vector3(spheresX * cellSize, spheresY * cellSize, spheresZ * cellSize);

            // Combine all sphere copies into a single vertex and index buffer.
            SubMeshesSample.CreateMultiMeshBuffer(center: new Vector3(0, 0, 0),
                                                  size: gridSize,
                                                  xCount: spheresX, yCount: spheresY, zCount: spheresZ,
                                                  meshGeometry3D: sphereGeometry,
                                                  vertexBuffer: out vertexBuffer,
                                                  indexBuffer: out indexBuffer);

            _multiMaterialMesh = new SimpleMesh<PositionNormalTexture>(vertexBuffer, indexBuffer,
                                                                       inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate);


            // All 3 SubMeshes are created up-front.
            // Initially only the first SubMesh renders anything (the other two have IndexCount = 0).
            // Selection is then done by only changing StartIndexLocation / IndexCount values -
            // no SubMesh is ever added or removed, so the RenderingQueues are never regenerated
            // and changing the selection is almost free (a few integers + one render).
            _multiMaterialMesh.SubMeshes = new SubMesh[]
            {
                // Renders triangles from the start up to the selection (or everything when there is no selection)
                new SubMesh("MainSubMesh1")
                {
                    MaterialIndex = 0, StartIndexLocation = 0, IndexCount = indexBuffer.Length
                },

                // Renders triangles after the selection (kept separate so the same material can be preserved)
                new SubMesh("MainSubMesh2")
                {
                    MaterialIndex = 0, StartIndexLocation = 0, IndexCount = 0
                },

                // Renders the selected triangles with the second (red) material
                new SubMesh("SelectionSubMesh")
                {
                    MaterialIndex = 1, StartIndexLocation = 0, IndexCount = 0
                },
            };

            _disposables.Add(_multiMaterialMesh);

            // Build an OctTree from the vertex buffer to significantly speed up hit testing
            // (check this with uncommenting the dxScene.GetClosestHitObject call in the mouse-move handler).
            _octTree = new OctTree(vertexBuffer, indexBuffer);


            // Material 0 = normal (green), material 1 = selection (red).
            var materials = new Ab3d.DirectX.Material[]
            {
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.Green.ToColor3()
                },
                new Ab3d.DirectX.Materials.StandardMaterial()
                {
                    DiffuseColor = Colors.Red.ToColor3()
                }
            };

            _meshObjectNode = new Ab3d.DirectX.MeshObjectNode(_multiMaterialMesh, materials);

            _disposables.Add(_meshObjectNode);

            // SceneNodeVisual3D makes the SceneNode showable inside a DXViewportView
            var multiMeshVisual3D = new SceneNodeVisual3D(_meshObjectNode);

            MainViewport.Children.Add(multiMeshVisual3D);
        }
        public PlanarShadows()
        {
            InitializeComponent();

            _disposables = new DisposeList();

            MainDXViewportView.DXSceneInitialized += (sender, args) =>
            {
                if (MainDXViewportView.DXScene != null)
                {
                    // Load the texture that will be shown on the shadow plane.
                    string textureFileName = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources/10x10-texture.png");

                    // TextureLoader.LoadShaderResourceView supports standard image files and dds files.
                    // Besides the ShaderResourceView it also fills a TextureInfo with the loaded
                    // texture's properties (bitmap size, dpi, format, hasTransparency).
                    TextureInfo textureInfo;
                    var planeTexture = Ab3d.DirectX.TextureLoader.LoadShaderResourceView(MainDXViewportView.DXScene.Device, textureFileName, out textureInfo);

                    _disposables.Add(planeTexture);


                    // Pick the BlendState that matches the texture's transparency setup.
                    // Possible results: CommonStates.Opaque, CommonStates.PremultipliedAlphaBlend or CommonStates.NonPremultipliedAlphaBlend.
                    var planeTextureBlendState = MainDXViewportView.DXScene.DXDevice.CommonStates.GetRecommendedBlendState(textureInfo.HasTransparency, textureInfo.HasPremultipliedAlpha);

                    _planarShadowRenderingProvider = new PlanarShadowRenderingProvider()
                    {
                        ShadowPlaneCenterPosition  = new Vector3(0, 0, 0),
                        ShadowPlaneSize            = new Vector2(400, 400),
                        ShadowPlaneNormalVector    = new Vector3(0, 1, 0),
                        ShadowPlaneHeightDirection = new Vector3(0, 0, -1),

                        ShadowPlaneMaterial = new StandardMaterial()
                        {
                            DiffuseColor      = Color3.White, // With DiffuseTextures set, DiffuseColor acts as a color filter (White = no filtering)
                            DiffuseTextures   = new ShaderResourceView[] { planeTexture },
                            TextureBlendState = planeTextureBlendState,
                            HasTransparency   = textureInfo.HasTransparency
                        },

                        ShadowPlaneBackMaterial = new StandardMaterial()
                        {
                            DiffuseColor = Colors.DimGray.ToColor3()
                        },

                        ShadowColor        = Color3.Black,
                        ShadowTransparency = 0.65f,

                        // The shadow is rendered as a standard 3D object, so it must be slightly
                        // lifted off the shadow plane to avoid z-fighting (two objects rendered at
                        // the same 3D position). The offset must stay very small so the gap between
                        // shadow and plane is not visible. Default value is 0.01f.
                        ShadowOffsetFromPlane = 0.01f,

                        // With PlanarShadowRenderingProvider there is no need for PlanarShadowMeshCreator
                        // from Ab3d.PowerToys to prepare a special MeshGeometry3D, and cutting the shadow
                        // to the plane bounds is done on the GPU with the stencil buffer instead of on the CPU.
                        // To still use PlanarShadowMeshCreator (for example, only for proper transparent
                        // shadows), set the following two properties to false:
                        ApplyShadowMatrix      = true,
                        CutShadowToPlaneBounds = true,

                        // To cast the shadow with a custom light that does not illuminate the scene, set CustomShadowLight.
                        // Otherwise the first light with the DXAttributeType.IsCastingShadow attribute set to true is used;
                        // when no light has that attribute, the first directional or point light is used.
                        //CustomShadowLight = new Ab3d.DirectX.Lights.DirectionalLight(new Vector3(0, -1, 1))
                        //CustomShadowLight = new Ab3d.DirectX.Lights.PointLight(new Vector3(0, 500, 0), 300)
                    };

                    _disposables.Add(_planarShadowRenderingProvider);


                    MainDXViewportView.DXScene.InitializeShadowRendering(_planarShadowRenderingProvider);
                }


                // Initial light setup.
                _lightHorizontalAngle = -60;
                _lightVerticalAngle   = 60;
                _lightDistance        = 500;

                _ambientLight = new AmbientLight(System.Windows.Media.Color.FromRgb(40, 40, 40));

                _shadowPointLight       = new PointLight();
                _shadowDirectionalLight = new DirectionalLight();

                Camera1.ShowCameraLight = ShowCameraLightType.Never; // prevent adding camera's light

                SetShadowLight(isDirectionalLight: true);

                UpdateLights();

                CreateSampleObjects();
            };



            this.PreviewKeyDown += OnPreviewKeyDown;

            // Make the control focusable and focused so it receives keyboard events
            this.Focusable = true;
            this.Focus();

            this.Unloaded += (sender, args) =>
            {
                StopAnimation();

                _disposables.Dispose();
                MainDXViewportView.Dispose();
            };
        }
        // Builds the sample scene. Demonstrates all the ways to get 3D content into a DXViewportView:
        // 1) standard WPF Visual3D objects,
        // 2) MeshObjectNode created from a GeometryMesh (positions / normals / textureCoordinates / triangleIndices arrays),
        // 3) MeshObjectNode created from a SimpleMesh<T> (a ready-made vertex + index buffer) where T is
        //    a) PositionNormalTexture, b) float, c) byte,
        // 4) WpfOptimizedModel3DGroupNode created from a frozen Model3DGroup,
        // 5) MeshObjectNode with a texture loaded by TextureLoader.
        //
        // Why use SceneNodes instead of WPF 3D objects?
        // DXEngine converts WPF objects into SceneNodes internally (dump them with
        // MainDXViewportView.DXScene.DumpSceneNodes() in the Immediate Window), and for most scenes
        // WPF objects are the best choice. But WPF uses doubles while DXEngine uses floats, so a model
        // with many positions stored as WPF objects takes almost twice the memory (e.g. 100,000 positions:
        // ~6.7 MB in WPF vs ~3.5 MB in DXEngine). Creating SceneNodes directly skips the WPF objects and
        // therefore initializes faster and uses less memory. The disadvantages are that such objects cannot
        // be shown when DXEngine falls back to WPF 3D rendering, and that they are more complex to create.
        private void CreateScene()
        {
            // IMPORTANT:
            // Before the Form is closed, all DXEngine objects that were created here (everything that
            // implements IDisposable: materials, meshes, SceneNodes) must be disposed.
            // The DisposeList collects them so they can all be disposed with a single call.
            _disposables = new DisposeList();

            AddWpfPyramidVisual3D();            // 1)
            AddGeometryMeshObjectNode();        // 2)
            AddSimpleMeshObjectNode();          // 3a)
            AddFloatSimpleMeshObjectNode();     // 3b)
            AddByteSimpleMeshObjectNode();      // 3c)
            AddFrozenModel3DGroupNode();        // 4)
            AddTexturedPlaneObjectNode();       // 5)

            // Add a PointLight and prevent the camera from adding its own light.
            var pointLight = new PointLight(Colors.White, new Point3D(100, 500, 0));

            MainViewport.Children.Add(pointLight.CreateModelVisual3D());

            Camera1.ShowCameraLight = ShowCameraLightType.Never;
        }

        // 1) The easiest way to add 3D models to DXEngine's scene:
        // add WPF Visual3D objects to the Viewport3D.Children collection.
        private void AddWpfPyramidVisual3D()
        {
            var pyramidVisual3D = new Ab3d.Visuals.PyramidVisual3D()
            {
                BottomCenterPosition = new Point3D(-100, 0, 0),
                Size     = new Size3D(80, 50, 80),
                Material = new DiffuseMaterial(Brushes.Blue)
            };

            pyramidVisual3D.SetName("PyramidVisual3D");

            MainViewport.Children.Add(pyramidVisual3D);

            // We could also start from PyramidMesh3D, calculate missing normals with
            // Ab3d.Utilities.MeshUtils.CalculateNormals and wrap the mesh into a
            // GeometryModel3D + ModelVisual3D before adding it to MainViewport.Children.
        }

        // 2) Creates a MeshObjectNode from a GeometryMesh that is built from
        // positions, normals, textureCoordinates and triangleIndices arrays.
        private void AddGeometryMeshObjectNode()
        {
            Vector3[] positions;
            Vector3[] normals;
            Vector2[] textureCoordinates;
            int[]     triangleIndices;

            // Get Pyramid mesh data
            GetObjectDataArrays(out positions, out normals, out textureCoordinates, out triangleIndices);

            // The easiest way to create a DXEngine material is Ab3d.DirectX.Materials.WpfMaterial
            // that converts a WPF material into a DXEngine material.
            var dxMaterial = new Ab3d.DirectX.Materials.WpfMaterial(new DiffuseMaterial(Brushes.Green));

            _disposables.Add(dxMaterial);

            var geometryMesh = new Ab3d.DirectX.GeometryMesh(positions, normals, textureCoordinates, triangleIndices, "PyramidMesh3D");

            _disposables.Add(geometryMesh);

            // NOTE: A GeometryMesh can also be created from a WPF MeshGeometry3D with
            // Ab3d.DirectX.Models.DXMeshGeometry3D(wpfMesh.Geometry, "PyramidMesh").

            var meshObjectNode = new Ab3d.DirectX.MeshObjectNode(geometryMesh, dxMaterial);

            meshObjectNode.Name = "Green-MeshObjectNode-from-GeometryMesh";

            _disposables.Add(meshObjectNode);

            // SceneNodeVisual3D shows a SceneNode in a DXViewportView
            MainViewport.Children.Add(new SceneNodeVisual3D(meshObjectNode));
        }

        // 3a) Creates a MeshObjectNode from a SimpleMesh<PositionNormalTexture>.
        // Providing an already generated vertex buffer array gives faster initialization than 2)
        // because the array can be directly used to create the DirectX vertex buffer
        // (in 2) the GeometryMesh first generates it from the separate arrays).
        // If models can be stored on disk as vertex + index buffers, this is the fastest way to load them.
        private void AddSimpleMeshObjectNode()
        {
            PositionNormalTexture[] vertexBuffer;
            int[] indexBuffer;
            GetVertexAndIndexBuffer(out vertexBuffer, out indexBuffer);

            var simpleMesh = new SimpleMesh<PositionNormalTexture>(vertexBuffer,
                                                                   indexBuffer,
                                                                   inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate,
                                                                   name: "SimpleMesh-from-PositionNormalTexture-array");

            _disposables.Add(simpleMesh);

            var dxMaterial = new Ab3d.DirectX.Materials.WpfMaterial(new DiffuseMaterial(Brushes.Red));

            _disposables.Add(dxMaterial);

            _redPyramidObjectNode      = new Ab3d.DirectX.MeshObjectNode(simpleMesh, dxMaterial);
            _redPyramidObjectNode.Name = "Red-MeshObjectNode-from-SimpleMesh";

            _disposables.Add(_redPyramidObjectNode);

            var sceneNodeVisual3D       = new SceneNodeVisual3D(_redPyramidObjectNode);
            sceneNodeVisual3D.Transform = new TranslateTransform3D(100, 0, 0);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }

        // 3b) Creates a MeshObjectNode from a SimpleMesh<float> - a vertex buffer with a base
        // element type (for example when the data is read directly from a file).
        // A drawback of such a non-standard vertex buffer (not Vector3, PositionNormalTexture,
        // PositionNormal or PositionTexture) is that the mesh does not support hit testing -
        // in this sample the camera cannot rotate around a mouse-hit SimpleMesh<float>.
        private void AddFloatSimpleMeshObjectNode()
        {
            float[] floatVertexBuffer;
            int[]   indexBuffer;
            GetFloatVertexAndIndexBuffer(out floatVertexBuffer, out indexBuffer);

            var floatSimpleMesh = new SimpleMesh<float>(floatVertexBuffer,
                                                        indexBuffer,
                                                        inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate,
                                                        name: "SimpleMesh-from-float-array");

            _disposables.Add(floatSimpleMesh);

            // IMPORTANT:
            // Without PositionNormalTexture or PositionNormal elements, DXEngine cannot calculate
            // the Bounds of the SimpleMesh, so they must be specified manually.
            // These bounds match: new Ab3d.Meshes.PyramidMesh3D(new Point3D(0, 0, 0), new Size3D(80, 50, 80))
            floatSimpleMesh.Bounds = new Bounds(new BoundingBox(minimum: new Vector3(-40, -25, -40), maximum: new Vector3(40, 25, 40)));

            // With a base element type we must also specify how many array elements define one vertex:
            // 3 (position x,y,z) + 3 (normal x,y,z) + 2 (texture coordinate u,v) = 8
            floatSimpleMesh.ArrayStride = 8;

            var dxMaterial = new Ab3d.DirectX.Materials.WpfMaterial(new DiffuseMaterial(Brushes.Orange));

            _disposables.Add(dxMaterial);

            _orangePyramidObjectNode      = new Ab3d.DirectX.MeshObjectNode(floatSimpleMesh, dxMaterial);
            _orangePyramidObjectNode.Name = "Orange-MeshObjectNode-from-FloatSimpleMesh";

            _disposables.Add(_orangePyramidObjectNode);

            var sceneNodeVisual3D       = new SceneNodeVisual3D(_orangePyramidObjectNode);
            sceneNodeVisual3D.Transform = new TranslateTransform3D(200, 0, 0);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }

        // 3c) Creates a MeshObjectNode from a SimpleMesh<byte>.
        // As in 3b), such a non-standard vertex buffer does not support hit testing.
        private void AddByteSimpleMeshObjectNode()
        {
            byte[] byteVertexBuffer;
            int[]  indexBuffer;
            GetByteVertexAndIndexBuffer(out byteVertexBuffer, out indexBuffer);

            var byteSimpleMesh = new SimpleMesh<byte>(byteVertexBuffer,
                                                      indexBuffer,
                                                      inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate,
                                                      name: "SimpleMesh-from-byte-array");

            _disposables.Add(byteSimpleMesh);

            // IMPORTANT: bounds must be specified manually (see AddFloatSimpleMeshObjectNode).
            // These bounds match: new Ab3d.Meshes.PyramidMesh3D(new Point3D(0, 0, 0), new Size3D(80, 50, 80))
            byteSimpleMesh.Bounds = new Bounds(new BoundingBox(minimum: new Vector3(-40, -25, -40), maximum: new Vector3(40, 25, 40)));

            // Array elements per vertex: 8 float values * 4 bytes per float = 32
            byteSimpleMesh.ArrayStride = 32;

            var dxMaterial = new Ab3d.DirectX.Materials.WpfMaterial(new DiffuseMaterial(Brushes.Yellow));

            _disposables.Add(dxMaterial);

            var meshObjectNode  = new Ab3d.DirectX.MeshObjectNode(byteSimpleMesh, dxMaterial);
            meshObjectNode.Name = "Yellow-MeshObjectNode-from-ByteSimpleMesh";

            _disposables.Add(meshObjectNode);

            var sceneNodeVisual3D       = new SceneNodeVisual3D(meshObjectNode);
            sceneNodeVisual3D.Transform = new TranslateTransform3D(300, 0, 0);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }

        // 4) When a frozen Model3DGroup is added to a DXViewportView it is converted into a
        // WpfOptimizedModel3DGroupNode - but then both the WPF and the DXEngine data stay in memory.
        // Creating the WpfOptimizedModel3DGroupNode manually allows releasing the WPF data, either by
        // setting AutomaticallyClearWpfObjectsAfterInitialization to true or by calling ClearWpfObjects.
        private void AddFrozenModel3DGroupNode()
        {
            string dragonModelFileName = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Resources\\Models\\dragon_vrip_res3.obj");

            var     readerObj   = new Ab3d.ReaderObj();
            Model3D readModel3D = readerObj.ReadModel3D(dragonModelFileName);

            double scale = 100 / readModel3D.Bounds.SizeX; // Scale the model to 100 SizeX

            readModel3D.Transform = new ScaleTransform3D(scale, scale, scale);

            // WpfOptimizedModel3DGroupNode requires a frozen Model3DGroup - wrap the model when needed.
            var model3DGroup = readModel3D as Model3DGroup;

            if (model3DGroup == null)
            {
                model3DGroup = new Model3DGroup();
                model3DGroup.Children.Add(readModel3D);
            }

            model3DGroup.Freeze();

            var wpfOptimizedModel3DGroupNode = new Ab3d.DirectX.Models.WpfOptimizedModel3DGroupNode(model3DGroup, name: "Frozen Model3DGroup");

            // Clear the referenced WPF 3D models as soon as the DirectX objects are created
            wpfOptimizedModel3DGroupNode.AutomaticallyClearWpfObjectsAfterInitialization = true;

            _disposables.Add(wpfOptimizedModel3DGroupNode);

            var sceneNodeVisual3D       = new SceneNodeVisual3D(wpfOptimizedModel3DGroupNode);
            sceneNodeVisual3D.Transform = new TranslateTransform3D(-100, -20, -100);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }

        // 5) Loads a texture with TextureLoader and shows it on a plane mesh.
        // Skipped when DXScene is not available (e.g. when DXEngine fell back to WPF 3D rendering).
        private void AddTexturedPlaneObjectNode()
        {
            if (MainDXViewportView.DXScene == null)
                return;

            var planeGeometry3D  = new Ab3d.Meshes.PlaneMesh3D(new Point3D(0, 0, 0), new Vector3D(0, 1, 0), new Vector3D(1, 0, 0), new Size(80, 80), 1, 1).Geometry;
            var dxMeshGeometry3D = new DXMeshGeometry3D(planeGeometry3D);
            _disposables.Add(dxMeshGeometry3D);

            // Texture file to load (a dds file here, but a png file would also work)
            string textureFileName = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources/ab4d-logo-220x220.dds");

            // CreateStandardTextureMaterial is the easiest way to load an image file and
            // create a material with the loaded texture in one step.
            // For more control use TextureLoader.LoadShaderResourceView to get the ShaderResourceView
            // and a TextureInfo, pick a BlendState with CommonStates.GetRecommendedBlendState and
            // build a StandardMaterial manually (DiffuseTextures, TextureBlendState, HasTransparency;
            // DiffuseColor then acts as a color filter - White preserves the original texture colors).
            var standardMaterial = Ab3d.DirectX.TextureLoader.CreateStandardTextureMaterial(MainDXViewportView.DXScene.DXDevice, textureFileName);

            // Both the created StandardMaterial and its ShaderResourceView must be disposed manually
            _disposables.Add(standardMaterial);
            _disposables.Add(standardMaterial.DiffuseTextures[0]);

            var meshObjectNode  = new Ab3d.DirectX.MeshObjectNode(dxMeshGeometry3D, standardMaterial);
            meshObjectNode.Name = "MeshObjectNode-from-PlaneMesh3D";

            _disposables.Add(meshObjectNode);

            var sceneNodeVisual3D       = new SceneNodeVisual3D(meshObjectNode);
            sceneNodeVisual3D.Transform = new TranslateTransform3D(0, 0, 100);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }
// ==== Esempio n. 29 (Example no. 29) ====
        // Shows _meshGeometry3D with low-level DXEngine objects:
        // 1) Build a managed PositionNormalTexture vertex array from the WPF MeshGeometry3D.
        // 2) Wrap it in a SimpleMesh<PositionNormalTexture> (creates the unmanaged DirectX vertex buffer).
        // 3) Create a MeshObjectNode (a SceneNode) from the SimpleMesh and a material.
        // 4) Put the MeshObjectNode into a SceneNodeVisual3D so it can be added to the Viewport3D.
        private void AddSimpleMesh()
        {
            // 1) Managed vertex buffer: one PositionNormalTexture per mesh position.
            int vertexCount = _meshGeometry3D.Positions.Count;

            _vertexBufferArray = new PositionNormalTexture[vertexCount];
            FillVertexBuffer(_vertexBufferArray, _meshGeometry3D.Positions, _meshGeometry3D.Normals, _meshGeometry3D.TextureCoordinates);

            // Copy the triangle indices into a plain int array for the index buffer.
            var triangleIndices = new int[_meshGeometry3D.TriangleIndices.Count];
            _meshGeometry3D.TriangleIndices.CopyTo(triangleIndices, 0);


            // 2) SimpleMesh creates the unmanaged DirectX vertex and index buffers.
            bool useDynamicBuffer = UseDynamicBufferCheckBox.IsChecked ?? false;

            _simpleMesh = new SimpleMesh<PositionNormalTexture>(_vertexBufferArray,
                                                                triangleIndices,
                                                                inputLayoutType: InputLayoutType.Position | InputLayoutType.Normal | InputLayoutType.TextureCoordinate,
                                                                name: "SimpleMesh-from-PositionNormalTexture-array",
                                                                createDynamicVertexBuffer: useDynamicBuffer);

            // Bounds can be set manually; otherwise SimpleMesh would scan all positions to compute them.
            // MeshGeometry3D has already computed its bounds, so we only convert them to DXEngine bounds.
            _simpleMesh.Bounds = _meshGeometry3D.Bounds.ToDXEngineBounds();

            _originalMeshSizeY = _simpleMesh.Bounds.BoundingBox.Maximum.Y - _simpleMesh.Bounds.BoundingBox.Minimum.Y;

            _disposables.Add(_simpleMesh);


            // Wrap a standard WPF DiffuseMaterial so the DXEngine can render it.
            var wpfDiffuseMaterial = new DiffuseMaterial(Brushes.Silver);
            var dxMaterial         = new Ab3d.DirectX.Materials.WpfMaterial(wpfDiffuseMaterial);

            _disposables.Add(dxMaterial);


            // 3) MeshObjectNode (derived from SceneNode) renders the SimpleMesh with the material.
            _meshObjectNode = new Ab3d.DirectX.MeshObjectNode(_simpleMesh, dxMaterial)
            {
                Name = "MeshObjectNode-from-SimpleMesh"
            };

            _disposables.Add(_meshObjectNode);


            // 4) SceneNodeVisual3D bridges the SceneNode into WPF's Viewport3D children.
            var sceneNodeVisual3D = new SceneNodeVisual3D(_meshObjectNode);

            // Scale and translate the sceneNodeVisual3D and than add it to the scene
            AddVisual3D(sceneNodeVisual3D);
        }
        // Shows the positions in positionsArray as screen-space pixels, either with the high-level
        // PixelsVisual3D (when _isUsingPixelsVisual3D is true) or with low-level DXEngine objects
        // (SimpleMesh + PixelMaterial + MeshObjectNode).
        // positionBounds should contain the bounds of all positions; when null or empty, the bounds
        // are calculated by checking every position (slower for big arrays).
        private void ShowPositionsArray(Vector3[] positionsArray, float pixelSize, Color4 pixelColor, Bounds positionBounds)
        {
            if (_isUsingPixelsVisual3D)
            {
                // The easiest way to show many pixels is to use PixelsVisual3D.
                var pixelsVisual3D = new PixelsVisual3D()
                {
                    Positions  = positionsArray,
                    PixelColor = pixelColor.ToWpfColor(),
                    PixelSize  = pixelSize
                };

                // It is highly recommended to manually set the PositionsBounds.
                // If this is not done, the bounds are calculated by the DXEngine with checking all the positions.
                pixelsVisual3D.PositionsBounds = positionBounds;

                MainViewport.Children.Add(pixelsVisual3D);

                // !!! IMPORTANT !!!
                // When PixelsVisual3D is not used any more, it needs to be disposed (we are using DisposeList to dispose all in Unloaded event handler)
                _disposables.Add(pixelsVisual3D);

                return;
            }


            // First step in showing positions in the positionsArray as pixels is to create a SimpleMesh<Vector3>.
            // This will create a DirectX VertexBuffer that will be passed to the shaders.
            var simpleMesh = new SimpleMesh <Vector3>(vertexBufferArray: positionsArray,
                                                      indexBufferArray: null,
                                                      inputLayoutType: InputLayoutType.Position);

            simpleMesh.PrimitiveTopology = PrimitiveTopology.PointList; // We need to change the default PrimitiveTopology.TriangleList to PointList

            // To correctly set the Camera's Near and Far distance, we need to provide the correct bounds of each shown 3D model.

            if (positionBounds != null && !positionBounds.IsEmpty)
            {
                // It is highly recommended to manually set the Bounds.
                simpleMesh.Bounds = positionBounds;
            }
            else
            {
                // if we do not manually set the Bounds, then we need to call CalculateBounds to calculate the bounds
                simpleMesh.CalculateBounds();
            }

            // NOTE: The previous version also called simpleMesh.CalculateBounds() here unconditionally,
            // which recomputed the bounds from all positions and discarded the manually set positionBounds,
            // defeating the optimization above. That redundant call has been removed.


            // We will need to dispose the SimpleMesh
            _disposables.Add(simpleMesh);


            // Create a new PixelMaterial
            _pixelMaterial = new PixelMaterial()
            {
                PixelColor = pixelColor,
                PixelSize  = pixelSize
            };

            _pixelMaterial.InitializeResources(MainDXViewportView.DXScene.DXDevice);

            _disposables.Add(_pixelMaterial);


            // Now create a new MeshObjectNode
            _meshObjectNode = new Ab3d.DirectX.MeshObjectNode(simpleMesh, _pixelMaterial);

            _disposables.Add(_meshObjectNode);

            // To be able to add the MeshObjectNode (or any other SceneNode) to WPF's Viewport3D,
            // we need to create a SceneNodeVisual3D
            var sceneNodeVisual3D = new SceneNodeVisual3D(_meshObjectNode);

            MainViewport.Children.Add(sceneNodeVisual3D);
        }