Example #1
        public OutlinesSample()
        {
            InitializeComponent();

            _disposables = new DisposeList();

            CreateSceneObjects();

            MainDXViewportView.DXSceneInitialized += delegate(object sender, EventArgs args)
            {
                if (MainDXViewportView.DXScene == null) // When using WPF 3D rendering
                {
                    return;
                }

                SetupOutlineRenderingStep();
            };

            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                _disposables.Dispose();

                DisposeOutlineBackBuffers();
            };
        }
Example #2
        public BackgroundAndOverlayRendering()
        {
            InitializeComponent();

            _disposables = new DisposeList();

            MainDXViewportView.DXSceneInitialized += delegate(object sender, EventArgs args)
            {
                if (MainDXViewportView.DXScene == null)
                {
                    return; // Probably WPF 3D rendering
                }
                AddStandardRenderedObjects();

                PrepareAlwaysOnTopRendering();
                AddCustomRenderedObjects();

                AddCustomRenderedLines();
            };


            // IMPORTANT:
            // It is very important to call the Dispose method on the DXViewportView when the control is no longer used (see the help file for more info)
            this.Unloaded += delegate
            {
                if (_disposables != null)
                {
                    _disposables.Dispose();
                    _disposables = null;
                }

                MainDXViewportView.Dispose();
            };
        }
Example #3
        public ObjectIdRendering()
        {
            InitializeComponent();

            var boxMesh      = new Ab3d.Meshes.BoxMesh3D(new Point3D(0, 0, 0), new Size3D(1, 1, 1), 1, 1, 1).Geometry;
            int modelsXCount = 20;
            int modelsYCount = 1;
            int modelsZCount = 20;

            var model3DGroup = CreateModel3DGroup(boxMesh, new Point3D(0, 5, 0), new Size3D(500, modelsYCount * 10, 500), 10, modelsXCount, modelsYCount, modelsZCount);

            MainViewport.Children.Add(model3DGroup.CreateModelVisual3D());


            _disposables = new DisposeList();

            MainDXViewportView.DXSceneDeviceCreated += MainDxViewportViewOnDxSceneDeviceCreated;

            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                if (_disposables != null)
                {
                    _disposables.Dispose();
                    _disposables = null;
                }

                MainDXViewportView.Dispose();
            };
        }
Example #4
        public OptimizedTubePathSample()
        {
            InitializeComponent();


            _disposables = new DisposeList();

            MainDXViewportView.DXSceneDeviceCreated += delegate(object sender, EventArgs args)
            {
                _solidColorEffect = MainDXViewportView.DXScene.DXDevice.EffectsManager.GetEffect <SolidColorEffect>();
                _disposables.Add(_solidColorEffect);

                _stopwatch = new Stopwatch();
                _stopwatch.Start();

                AddSpirals_MeshObjectNode(10, 10, 5000, useMultiThreading: true);

                // Uncomment to see how the tubes are created in a standard Ab3d.PowerToys way.
                //AddSpirals_TubePathVisual3D(10, 10, 5000);

                // To see how to use instancing to draw tube paths, uncomment the following line.
                // Note: In this case instancing is slower than rendering a fixed geometry because the GPU's work is much more complicated when instancing is used.
                //AddInstancedSpirals(10, 10, 5000);
            };

            // Subscribe to the event that is raised when the first frame is rendered
            MainDXViewportView.SceneRendered += MainDXViewportViewOnSceneRendered;

            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                _disposables.Dispose();
            };
        }
Example #5
        public PixelRenderingSample()
        {
            InitializeComponent();

            PixelSizeComboBox.ItemsSource   = new float[] { 0.1f, 0.5f, 1, 2, 4, 8 };
            PixelSizeComboBox.SelectedIndex = 3;

            _disposables = new DisposeList();

            MainDXViewportView.DXSceneInitialized += delegate(object sender, EventArgs args)
            {
                CreateScene();
            };

            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                _disposables.Dispose();

                if (_pixelEffect != null)
                {
                    _pixelEffect.Dispose();
                    _pixelEffect = null;
                }

                MainDXViewportView.Dispose();
            };
        }
Example #6
        private void Dispose()
        {
            _textureFiles.Clear();
            _texturesCache.Clear();
            _dxMaterials.Clear();

            _disposables.Dispose();
            _disposables = new DisposeList();
        }
Example #7
        public MouseControllerForPointCloud()
        {
            InitializeComponent();

            _disposables = new DisposeList();

            // Wait until DXScene is initialized and then create the data
            MainDXViewportView.DXSceneInitialized += delegate(object sender, EventArgs args)
            {
                Mouse.OverrideCursor = Cursors.Wait;

                try
                {
                    Vector3[] positions;
                    var       positionsBounds = new BoundingBox(new Vector3(-100, 0, -100), new Vector3(100, 20, 100));

                    GenerateSinusPointCloudData(xCount: 300, yCount: 300,
                                                bounds: positionsBounds,
                                                positions: out positions);


                    var linearGradientBrush = CreateDataGradientBrush();
                    var gradientColorsArray = CreateGradientColorsArray(linearGradientBrush);

                    // Set up offsets and factors so that each position will be converted
                    // into a value from 0 to 1 based on its y position.
                    // This value will then be used to select the color from the gradientColorsArray.

                    float minValue = positionsBounds.Minimum.Y;
                    float maxValue = positionsBounds.Maximum.Y;

                    var      offsets        = new Vector3(0, -minValue, 0);
                    var      factors        = new Vector3(0, 1.0f / (maxValue - minValue), 0);
                    Color4[] positionColors = CreatePositionColorsArray(positions, offsets, factors, gradientColorsArray);


                    InitializePointCloud(positions, positionsBounds, positionColors);


                    // Set DXScene and OptimizedPointMesh to PointCloudMouseCameraController1
                    PointCloudMouseCameraController1.DXScene            = MainDXViewportView.DXScene;
                    PointCloudMouseCameraController1.OptimizedPointMesh = _optimizedPointMesh;

                    PointCloudMouseCameraController1.MaxDistanceToAnyPosition = 1.0f;
                }
                finally
                {
                    Mouse.OverrideCursor = null;
                }
            };

            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                _disposables.Dispose();
                MainDXViewportView.Dispose();
            };
        }
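
The CreatePositionColorsArray method is not shown in this sample. Below is a minimal sketch of how the offsets and factors described in the comments above could map each position's Y value to a color from gradientColorsArray; this is an illustration under those assumptions, not the sample's actual implementation.

        // Minimal sketch (not the actual sample code): map each position to a gradient color
        // using value = dot(position + offsets, factors), clamped to [0, 1].
        private static Color4[] CreatePositionColorsArray(Vector3[] positions, Vector3 offsets, Vector3 factors, Color4[] gradientColorsArray)
        {
            var positionColors = new Color4[positions.Length];

            for (int i = 0; i < positions.Length; i++)
            {
                Vector3 p = positions[i];

                // With offsets = (0, -minValue, 0) and factors = (0, 1/(maxValue-minValue), 0)
                // this evaluates to (p.Y - minValue) / (maxValue - minValue), i.e. a 0..1 value based on the Y position.
                float value = (p.X + offsets.X) * factors.X +
                              (p.Y + offsets.Y) * factors.Y +
                              (p.Z + offsets.Z) * factors.Z;

                value = Math.Max(0.0f, Math.Min(1.0f, value)); // clamp to [0, 1]

                int colorIndex = (int)(value * (gradientColorsArray.Length - 1));
                positionColors[i] = gradientColorsArray[colorIndex];
            }

            return positionColors;
        }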
Example #8
        private void CreateScene()
        {
            if (MainDXViewportView.DXScene == null)
            {
                return; // Not yet initialized or using WPF 3D
            }
            Mouse.OverrideCursor = Cursors.Wait;

            MainViewport.Children.Clear();
            _modelDisposables.Dispose();           // Dispose previously used resources

            _modelDisposables = new DisposeList(); // Start with a fresh DisposeList


            // Parse number of positions to render
            var regex    = new Regex(@".+\((?<width>\d+)\s?x\s?(?<length>[0-9,]+)\)");
            var sizeText = (string)SceneTypeComboBox.SelectedItem;

            var match = regex.Match(sizeText);

            int widthCount, lengthCount;

            if (match.Success)
            {
                string widthText  = match.Groups["width"].Value;
                string lengthText = match.Groups["length"].Value;

                widthText  = widthText.Replace(",", "");
                lengthText = lengthText.Replace(",", "");

                widthCount  = Int32.Parse(widthText);
                lengthCount = Int32.Parse(lengthText);
            }
            else
            {
                widthCount  = 100;
                lengthCount = 10000;
            }


            float widthStep  = widthCount <= 100 ? 0.1f : 0.01f;
            float lengthStep = lengthCount <= 10000 ? 0.1f : 0.01f;


            float pixelSize = (float)PixelSizeComboBox.SelectedItem;


            var positionsArray = CreatePositionsArray(new Point3D(0, 0, 0), widthCount, lengthCount, widthStep, lengthStep);

            ShowPositionsArray(positionsArray, pixelSize, Colors.Red.ToColor4(), Bounds.Empty);

            Mouse.OverrideCursor = null;
        }
Example #9
        public NormalMappingSample()
        {
            InitializeComponent();

            _disposables = new DisposeList();

            MainDXViewportView.DXSceneDeviceCreated += OnDXSceneDeviceCreated;

            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                _disposables.Dispose();
                MainDXViewportView.Dispose();
            };
        }
Example #10
        private void Dispose()
        {
            if (_disposables != null)
            {
                _disposables.Dispose();
                _disposables = null;
            }

            if (_mainDXViewportView != null)
            {
                _mainDXViewportView.Dispose();
                _mainDXViewportView = null;
            }
        }
Example #11
        public PixelRenderingOptionsSample()
        {
            InitializeComponent();

            _disposables = new DisposeList();

            CreateScene();

            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                _disposables.Dispose();
                MainDXViewportView.Dispose();
            };
        }
Example #12
        public ObjectSelectionWithSubMeshes()
        {
            InitializeComponent();

            _disposables = new DisposeList();
            CreateScene();

            SetupHitTesting();

            this.Unloaded += delegate(object sender, RoutedEventArgs e)
            {
                _disposables.Dispose();
                MainDXViewportView.Dispose();
            };
        }
Example #13
        protected virtual void Dispose(bool disposing)
        {
            if (!disposed)
            {
                if (disposing)
                {
                    // Dispose managed resources here.
                    DisposeList.Dispose();
                    Disposing();
                }

                // Clear unmanaged resources here

                disposed = true;
            }
        }
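
For reference, here is a minimal sketch of how a Dispose(bool disposing) method like the one above is usually wired into the standard IDisposable pattern; the DisposeList and Disposing() members are taken from the snippet above, while the public Dispose() wrapper shown here is an assumed, conventional addition.

        // Standard IDisposable entry point that forwards to Dispose(bool disposing) above.
        public void Dispose()
        {
            Dispose(disposing: true);  // dispose managed (and any unmanaged) resources
            GC.SuppressFinalize(this); // a finalizer, if one exists, no longer needs to run
        }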
Example #14
        public OptimizedPointCloud()
        {
            InitializeComponent();

            PixelSizeComboBox.ItemsSource   = new float[] { 0.1f, 0.5f, 1, 2, 4, 8 };
            PixelSizeComboBox.SelectedIndex = 3;

            SceneTypeComboBox.ItemsSource = new string[]
            {
                "100,000 pixels (100 x 1,000)",
                "1 million pixels (100 x 10,000)",
                "10 million pixels (100 x 100,000)",
                "25 million pixels (1000 x 25,000)",
                "100 million pixels (1000 x 100,000)",
                "150 million pixels (1000 x 150,000)"
                // Bigger values would exceed the maximum .NET array size in the CreatePositionsArray method
            };
            SceneTypeComboBox.SelectedIndex = 1;

            _modelDisposables = new DisposeList();


            DXDiagnostics.IsCollectingStatistics = true; // Collect rendering statistics

            MainDXViewportView.DXSceneInitialized += delegate(object sender, EventArgs args)
            {
                CreateScene();
            };

            MainDXViewportView.SceneRendered += delegate(object sender, EventArgs args)
            {
                UpdateStatistics();
            };

            // Set up keyboard support
            this.Focusable       = true;             // by default a Page is not focusable and therefore does not receive the KeyDown event
            this.PreviewKeyDown += OnPreviewKeyDown; // Use PreviewKeyDown to also get arrow keys (the KeyDown event does not receive them)
            this.Focus();

            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                _modelDisposables.Dispose();
                MainDXViewportView.Dispose();
            };
        }
Example #15
        public void TestNonDisposeList()
        {
            Semaphore s = new Semaphore(0, 100);
            // Create object
            DisposeList disp = FileSystem.OS
                               .AddDisposeAction(d => s.Release(), s);

            //
            Assert.IsFalse(s.WaitOne(1000));
            Assert.IsFalse(disp.IsDisposeCalled);
            Assert.IsFalse(disp.IsDisposing);
            Assert.IsFalse(disp.IsDisposed);
            disp.Dispose();
            Assert.IsTrue(s.WaitOne(1000));
            Assert.IsFalse(disp.IsDisposeCalled);
            Assert.IsFalse(disp.IsDisposing);
            Assert.IsFalse(disp.IsDisposed);
        }
Example #16
        private void ClearAllScenes()
        {
            RootGrid.BeginInit();

            for (var i = RootGrid.Children.Count - 1; i >= 0; i--)
            {
                var oneChild = RootGrid.Children[i];

                if (!ReferenceEquals(oneChild, SettingsBorder) && !ReferenceEquals(oneChild, TitleTextBlock))
                {
                    RootGrid.Children.Remove(oneChild);
                }
            }

            RootGrid.EndInit();


            _disposables.Dispose();
            _disposables = new DisposeList();
        }
Example #17
        public SubMeshesSample()
        {
            InitializeComponent();

            _disposables = new DisposeList();

            CreateScene();

            StartAnimation();


            this.Unloaded += delegate(object sender, RoutedEventArgs e)
            {
                CompositionRenderingHelper.Instance.Unsubscribe(this);

                _disposables.Dispose();

                MainDXViewportView.Dispose();
            };
        }
Example #18
        public OptimizedMeshMorphing()
        {
            InitializeComponent();

            _meshMorphTimes      = new List <double>();
            _meshUpdateTimes     = new List <double>();
            _dxEngineUpdateTimes = new List <double>();

            DXDiagnostics.IsCollectingStatistics = true;

            CreateScene();
            CreateMorphedMesh();

            MainDXViewportView.SceneRendered += delegate
            {
                if (_dxEngineUpdateTimes == null)
                {
                    _dxEngineUpdateTimes = new List <double>();
                }

                if (MainDXViewportView.DXScene != null && MainDXViewportView.DXScene.Statistics != null)
                {
                    _dxEngineUpdateTimes.Add(MainDXViewportView.DXScene.Statistics.UpdateTimeMs);
                }
            };

            CompositionTarget.Rendering += CompositionTargetOnRendering;

            this.Unloaded += delegate(object sender, RoutedEventArgs e)
            {
                CompositionTarget.Rendering -= CompositionTargetOnRendering;

                if (_disposables != null)
                {
                    _disposables.Dispose();
                    _disposables = null;
                }

                MainDXViewportView.Dispose();
            };
        }
Example #19
        public AdvancedInstanceRendering()
        {
            InitializeComponent();

            _disposables = new DisposeList();

            // Wait until DXScene and DirectX device are initialized.
            // Then we can create the instance buffers in the InstancedMeshGeometry3DNode object.
            MainDXViewportView.DXSceneDeviceCreated += MainDXViewportViewOnDXSceneDeviceCreated;

            // Start animating...
            CompositionTarget.Rendering += CompositionTargetOnRendering;

            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                CompositionTarget.Rendering -= CompositionTargetOnRendering;

                _disposables.Dispose();
                MainDXViewportView.Dispose();
            };
        }
Example #20
        public HitTestingSample()
        {
            InitializeComponent();

            _disposables = new DisposeList();

            _hitLinesModelVisual3D = new ModelVisual3D();
            MainViewport3D.Children.Add(_hitLinesModelVisual3D);


            CreateTestModels();

            MainDXViewportView.SceneRendered += MainDXViewportViewOnSceneRendered;

            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                _disposables.Dispose();

                MainDXViewportView.Dispose();
            };
        }
Example #21
        public static void Main(string[] args)
        {
            {
                #region Snippet_10a
                IDisposable  disposable  = new ReaderWriterLockSlim();
                IDisposeList disposeList = new DisposeList();
                disposeList.AddDisposable(disposable);
                // ... do work ... and dispose both.
                disposeList.Dispose();
                #endregion Snippet_10a
            }
            {
                #region Snippet_10b
                IDisposeList disposeList = new DisposeList();
                disposeList.AddDisposeAction(_ => Console.WriteLine("Disposed"), null);
                // ... do work ...
                disposeList.Dispose();
                #endregion Snippet_10b
            }
            {
                #region Snippet_10c
                IBelatableDispose disposeList = new DisposeList();

                // Postpone dispose
                IDisposable belateDisposeHandle = disposeList.BelateDispose();
                // Start concurrent work
                Task.Run(() =>
                {
                    // Do work
                    Thread.Sleep(100);
                    // Release the belate handle. Dispose happens here or below, depending on which thread runs last.
                    belateDisposeHandle.Dispose();
                });

                // Start dispose, but postpone it until belateDisposeHandle is disposed in the other thread.
                disposeList.Dispose();
                #endregion Snippet_10c
            }
        }
Example #22
        //private int _xCount = 10000;
        //private int _yCount = 2000;
        // Performance values for the 10000 x 2000 height map on NVIDIA 1080:
        // DirectXOverlay: render time 0.25 ms (4000 FPS)
        // DirectXImage - with back face rendering (bottom of the height map): render time around 14 ms (70 FPS)
        //              - without back face rendering:                         render time around 7 ms (140 FPS)

        public OptimizedHeightMapGeneration()
        {
            InitializeComponent();

            _disposables = new DisposeList();


            float[,] heightData;
            Color4[] positionColorsArray;

            //GenerateSimpleHeightData(out heightData, out positionColorsArray);
            //GenerateRandomHeightData(out heightData, out positionColorsArray);
            GenerateSinusHeightData(out heightData, out positionColorsArray);

            GenerateHeightMapObject(heightData, positionColorsArray);


            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                _disposables.Dispose();
                MainDXViewportView.Dispose();
            };
        }
Example #23
        public void TestBelateNonDispose()
        {
            Semaphore s = new Semaphore(0, 100);
            // Create object
            DisposeList disp = FileSystem.OS;
            // Belate dispose, this handle is passed to the task
            IDisposable belateHandle = disp.BelateDispose();

            // Start work
            Task.Run(() =>
            {
                // Do work
                Thread.Sleep(1000);
                //
                Assert.IsFalse(disp.IsDisposing);
                Assert.IsTrue(disp.IsDisposeCalled);
                // Release belate handle
                belateHandle.Dispose();
                // Check is disposed
                Assert.IsFalse(disp.IsDisposed);
                //
                s.Release();
            });
            // Start dispose, which is postponed
            disp.Dispose();
            // Check is disposed
            Assert.IsTrue(disp.IsDisposeCalled);
            Assert.IsFalse(disp.IsDisposed);
            Assert.IsFalse(disp.IsDisposing);

            // Wait for the task so that the test does not exit before the thread completes
            s.WaitOne();
            Assert.IsFalse(disp.IsDisposeCalled);
            Assert.IsFalse(disp.IsDisposing);
            Assert.IsFalse(disp.IsDisposed);
        }
Example #24
        // This sample shows how to prevent problems when 3D lines are rendered on top of solid 3D models.
        // In this case the lines can appear broken because they are rendered at the same 3D position as the solid model - a problem known as z-fighting.
        // The standard way to solve this problem is to apply a so-called depth bias.
        //
        // This cannot be done with WPF 3D rendering, but line rendering in DXEngine supports it.
        //
        // The easiest way to set the depth bias is to use the following code:
        // lineVisual3D.SetDXAttribute(DXAttributeType.LineDepthBias, depthBias);
        //
        // In this case the lineVisual3D will be moved by the depthBias amount (in world coordinates) closer to the camera.
        // This calculation is done in the vertex shader, so it works for any camera orientation.
        //
        // The LineDepthBias attribute can be applied to all Visual3D objects from the Ab3d.PowerToys library that show 3D lines
        // (LineVisual3D, MultiLineVisual3D, PolyLineVisual3D, WireframeVisual3D, etc.).
        //
        // Note that when the scene is big (the camera distance is large), a bigger depth bias is needed.
        // For example:
        // When this sample is rendered with a sphere with radius 10, a DepthBias of 0.01 works well.
        // But when the sphere's radius is 1000, a value of 0.01 is not enough; in this case a DepthBias of 1 is needed.

        public LineDepthBiasSample()
        {
            InitializeComponent();


            SphereRadiusComboBox.ItemsSource  = new double[] { 1, 10, 100, 1000 };
            SphereRadiusComboBox.SelectedItem = 100.0;

            DepthBiasComboBox.ItemsSource  = new double[] { 0, 0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1.0, 5.0, 10.0 };
            DepthBiasComboBox.SelectedItem = 0.1;


            _disposables = new DisposeList();

            CreateAllScenes();

            RadiusInfoControl.InfoText =
                @"This ComboBox changes the radius of the sphere on the left.
Because the camera distance is also adjusted, it looks
like the sphere is still the same size even though it has changed.

The size change can be seen by checking the 3D lines, which
may appear too distant from the solid object or disconnected
because of z-fighting.

This shows that different object sizes require different bias values.";

            Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                if (_disposables != null)
                {
                    _disposables.Dispose();
                    _disposables = null;
                }
            };
        }
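
Following the comments at the top of this example, here is a minimal sketch of an assumed helper (not part of the sample above) that applies the depth bias selected in DepthBiasComboBox to a 3D line using the SetDXAttribute call mentioned in those comments.

        // Sketch: apply the selected depth bias to a line Visual3D
        // (uses the lineVisual3D.SetDXAttribute call shown in the comments above).
        private void ApplyLineDepthBias(Ab3d.Visuals.LineVisual3D lineVisual3D)
        {
            double depthBias = (double)DepthBiasComboBox.SelectedItem;

            // Move the 3D line towards the camera by depthBias (in world coordinates).
            // This is done in the vertex shader, so it works for any camera orientation.
            lineVisual3D.SetDXAttribute(DXAttributeType.LineDepthBias, depthBias);
        }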
Example #25
        public AlphaClippingSample()
        {
            InitializeComponent();

            StandardAlphaBlendingInfoControl.InfoText =
                @"Alpha blending is the standard way of rendering transparent objects in 3D graphics.
With alpha blending the transparent pixels are blended with the colors of the already
rendered 3D objects - the alpha value defines how much of the color is
taken from the transparent object compared to the already rendered color.

This technique can provide very accurate results.
But it requires that the objects are rendered in the correct order -
first all opaque objects are rendered, and then the transparent objects are rendered
from those farthest from the camera to those closest to it.

Sorting transparent objects is required for 2 reasons:
1) when rendering a transparent pixel, the color of the objects behind this pixel needs to be final
   for the blending to be correct.
2) transparent objects usually also write to the depth buffer - this means that
   after they have been rendered, no other object that is farther away from the camera will be rendered
   (the depth test will prevent rendering those objects).

Therefore it is essential to first render the opaque objects,
then sort the transparent objects and render them.";


            AlphaClippingThresholdInfoControl.InfoText =
                @"Alpha clipping is a technique that can be used when rendering textures with some pixels opaque and some transparent.
In this case, the user can select an alpha clip threshold - this is a value that specifies at which alpha value the 
pixels will be clipped (not rendered and their depth will not be written to the depth buffer).

For example, if AlphaClipThreshold is set to 0.2, then all pixels with an alpha value less than 0.2 will be clipped.
This is useful when textures have very distinctive fully transparent and fully opaque areas.
In this case it may not be necessary to sort transparent objects, but it is still recommended because the
pixels that are not clipped are still rendered with alpha blending.

The problem with this technique is that at the border of the transparent area some artifacts may appear -
adjusting the threshold value may help to solve that - check the green trees and adjust the threshold value to see that.";

            AlphaToCoverageInfoControl.InfoText =
                @"Alpha-to-coverage is a special blending technique that can be used to render textures with transparent and semi-transparent
pixels and does not require objects to be sorted by their camera distance.

When using alpha-to-coverage, the graphics card determines if the pixel is transparent or opaque based on the alpha value
(when alpha is less than 0.5 the pixel is considered to be fully transparent; otherwise the pixel is considered to be fully opaque).

What is more, when using MSAA (multi-sample anti-aliasing) the level of transparency can be defined more accurately
by making some sub-pixel samples transparent and some opaque.
For example, when using 8x MSAA each pixel's color is calculated by combining 8 sub-pixel samples -
when alpha-to-coverage is enabled and the alpha value is 0.25 (=2/8), then 2 of the samples will be opaque and 6 will be transparent.

This way it is possible to create smoother transitions from fully transparent to fully opaque regions.
This technique does not produce as accurate results as standard alpha blending, but a big advantage is that it does not require 
objects to be sorted (and rendered) from those that are farthest away to those that are closest to the camera 
(and the results are still very good for some use cases - especially when the textures have only small transitions from transparent to opaque).";



            AlphaClippingThresholdComboBox.ItemsSource = new string[]
            {
                "0 (disabled)", // alpha clipping is disabled: no color will be written to the pixel, but the pixels depth information will be written to depth buffer and this will prevent rendering objects that are rendered after this object and are farther away from the camera (even when behind fully transparent pixels).
                "0.01",         // discard only pixels with alpha less then 0.01 - this may leave some semi-transparent border around the object
                "0.02",
                "0.05",
                "0.1",
                "0.2",
                "0.5",
                "0.8",
                "0.9",
                "0.99" // discard all non-opaque pixels (pixels with alpha less then 0.99 are discarded) - this leaves no semi-transparent border around the object but my clip too much pixels
            };

            AlphaClippingThresholdComboBox.SelectedIndex = 4; // = "0.1"

            MainDXViewportView.DXSceneDeviceCreated += delegate(object sender, EventArgs args)
            {
                CreateTestSemiTransparentObjects();

                MainDXViewportView.DXScene.IsTransparencySortingEnabled = false; // Disable sorting by camera distance
            };


            Camera1.CameraChanged += Camera1OnCameraChanged;

            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                if (_disposables != null)
                {
                    _disposables.Dispose();
                    _disposables = null;
                }

                MainDXViewportView.Dispose();
            };
        }
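
To make the alpha-to-coverage arithmetic from the info text above concrete, here is a small illustrative helper (not part of the sample) that estimates how many MSAA sub-pixel samples end up opaque for a given alpha value.

        // Illustration of the alpha-to-coverage arithmetic described in the info text:
        // with N-sample MSAA, an alpha value a makes roughly round(a * N) sub-pixel samples opaque
        // (e.g. alpha = 0.25 with 8x MSAA => 2 opaque and 6 transparent samples).
        private static int EstimateOpaqueSampleCount(float alpha, int msaaSampleCount)
        {
            if (alpha <= 0.0f)
                return 0;

            if (alpha >= 1.0f)
                return msaaSampleCount;

            return (int)Math.Round(alpha * msaaSampleCount);
        }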
Example #26
        public PlanarShadows()
        {
            InitializeComponent();

            _disposables = new DisposeList();

            MainDXViewportView.DXSceneInitialized += delegate(object sender, EventArgs args)
            {
                if (MainDXViewportView.DXScene != null)
                {
                    string textureFileName = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources/10x10-texture.png");

                    // To load a texture from a file, you can use TextureLoader.LoadShaderResourceView (this supports loading standard image files and also dds files).
                    // This method returns a ShaderResourceView and can also set a textureInfo parameter that describes some properties of the loaded texture (bitmap size, dpi, format, hasTransparency).
                    TextureInfo textureInfo;
                    var         loadedShaderResourceView = Ab3d.DirectX.TextureLoader.LoadShaderResourceView(MainDXViewportView.DXScene.Device, textureFileName, out textureInfo);

                    _disposables.Add(loadedShaderResourceView);


                    // Get recommended BlendState based on HasTransparency and HasPreMultipliedAlpha values.
                    // Possible values are: CommonStates.Opaque, CommonStates.PremultipliedAlphaBlend or CommonStates.NonPremultipliedAlphaBlend.
                    var recommendedBlendState = MainDXViewportView.DXScene.DXDevice.CommonStates.GetRecommendedBlendState(textureInfo.HasTransparency, textureInfo.HasPremultipliedAlpha);

                    _planarShadowRenderingProvider = new PlanarShadowRenderingProvider()
                    {
                        ShadowPlaneCenterPosition  = new Vector3(0, 0, 0),
                        ShadowPlaneSize            = new Vector2(400, 400),
                        ShadowPlaneNormalVector    = new Vector3(0, 1, 0),
                        ShadowPlaneHeightDirection = new Vector3(0, 0, -1),

                        ShadowPlaneMaterial = new StandardMaterial()
                        {
                            DiffuseColor      = Color3.White, // When DiffuseTextures are set, then DiffuseColor is used as a color filter (White means no filter)
                            DiffuseTextures   = new ShaderResourceView[] { loadedShaderResourceView },
                            TextureBlendState = recommendedBlendState,
                            HasTransparency   = textureInfo.HasTransparency
                        },

                        ShadowPlaneBackMaterial = new StandardMaterial()
                        {
                            DiffuseColor = Colors.DimGray.ToColor3()
                        },

                        ShadowColor        = Color3.Black,
                        ShadowTransparency = 0.65f,

                        // Because the shadow is rendered as a standard 3D object, we need to offset it from the shadow plane
                        // to prevent z-fighting problems that occur when two 3D objects are rendered at the same 3D position.
                        // This value needs to be very small so that it is not visible that the shadow is above the plane.
                        // The default value is 0.01f.
                        ShadowOffsetFromPlane = 0.01f,

                        // When using PlanarShadowRenderingProvider we do not need the PlanarShadowMeshCreator from Ab3d.PowerToys
                        // to prepare a special MeshGeometry3D for us. The shadow also does not need to be manually cut (on the CPU)
                        // to the plane bounds, because this can be done with a hardware-accelerated algorithm (using the stencil buffer).
                        // But if we still want to use PlanarShadowMeshCreator, we can set the following two properties to false
                        // (for example, if we wanted to use PlanarShadowRenderingProvider just to provide proper transparent shadows).
                        ApplyShadowMatrix      = true,
                        CutShadowToPlaneBounds = true,

                        // To use a custom light that does not illuminate the 3D scene set the CustomShadowLight.
                        // Otherwise the first light that has DXAttributeType.IsCastingShadow attribute set to true is used.
                        // If no light has IsCastingShadow attribute set, then the first directional or point light is used.
                        //CustomShadowLight = new Ab3d.DirectX.Lights.DirectionalLight(new Vector3(0, -1, 1))
                        //CustomShadowLight = new Ab3d.DirectX.Lights.PointLight(new Vector3(0, 500, 0), 300)
                    };

                    _disposables.Add(_planarShadowRenderingProvider);


                    MainDXViewportView.DXScene.InitializeShadowRendering(_planarShadowRenderingProvider);
                }


                _lightHorizontalAngle = -60;
                _lightVerticalAngle   = 60;
                _lightDistance        = 500;

                _ambientLight = new AmbientLight(System.Windows.Media.Color.FromRgb(40, 40, 40));

                _shadowPointLight       = new PointLight();
                _shadowDirectionalLight = new DirectionalLight();

                Camera1.ShowCameraLight = ShowCameraLightType.Never; // prevent adding camera's light

                SetShadowLight(isDirectionalLight: true);

                UpdateLights();

                CreateSampleObjects();
            };



            this.PreviewKeyDown += OnPreviewKeyDown;

            // This will allow receiving keyboard events
            this.Focusable = true;
            this.Focus();

            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                StopAnimation();

                _disposables.Dispose();
                MainDXViewportView.Dispose();
            };
        }
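
The comments in this example mention that the shadow-casting light can be selected with the DXAttributeType.IsCastingShadow attribute. A minimal sketch of how that could look is shown below; calling SetDXAttribute on a light is an assumption here, made by analogy with the SetDXAttribute usage shown for line Visual3D objects in the LineDepthBiasSample comments.

        // Sketch (assumed usage): mark the directional light as the shadow-casting light so that
        // PlanarShadowRenderingProvider uses it instead of the first directional or point light.
        private void MarkShadowCastingLight()
        {
            _shadowDirectionalLight.SetDXAttribute(DXAttributeType.IsCastingShadow, true);
        }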
Example #27
        public RectangularSelectionSample()
        {
            InitializeComponent();


            BoundsIn2DInfoControl.InfoText =
                @"The simplest technique to do rectangular selection is to convert the object's 3D bounds (axis aligned bounding box) into a 2D rectangle that represents the bounds on the screen. Then we can simply call IntersectsWith method that checks if the two 2D rectangles intersect.

Advantages:
- Very simple and fast when there is not a lot of 3D objects.
- Also selects the objects that are behind the objects closer to the camera.
- Can be used with only Ab3d.PowerToys (without Ab3d.DXEngine).

Disadvantages:
- Not accurate - the bounds of 3D objects and its bounds in 2D world are bigger then the the actual 3D object - selection is done before the user actually touches the 3D object.
- Slow when checking a lot of 3D objects.
- Cannot be used to select 3D lines.";


            ObjectIdMapInfoControl.InfoText =
                @"With Ab3d.DXEngine it is possible to render objects to a bitmap in such a way that each object is rendered with a different color where the color represents the object's id. When such a bitmap is rendered it is possible to get individual pixel colors and from that get the original object that was used to render the pixel.

Advantages:
- Pixel perfect accuracy.
- Fast when rendering a lot of objects.
- Can be used to select 3D lines.
- Can be extended to support some other selection types and not only rectangular selection.

Disadvantages:
- More complex (using custom rendering steps) than using simple bounding boxes.
- Slower for a simple 3D scene (DXEngine needs to set up the DirectX resources for another rendering pass; much more memory is also required).
- We need to find the original WPF 3D object from DXEngine's RenderablePrimitive object.
- Cannot select objects that are behind some other objects that are closer to the camera.";


            MouseCameraControllerInfo1.AddCustomInfoLine(0, MouseCameraController.MouseAndKeyboardConditions.LeftMouseButtonPressed, "Rectangular selection");

            this.Cursor          = Cursors.Cross;
            OptionsBorder.Cursor = Cursors.Arrow;


            _selectedDiffuseMaterial = new DiffuseMaterial(Brushes.Red);

            _savedMaterials  = new Dictionary <GeometryModel3D, Material>();
            _savedLineColors = new Dictionary <BaseLineVisual3D, Color>();

            _disposables = new DisposeList();


            CreateTestScene();


            // Setup mouse events that will be used to show rectangular selection
            this.MouseLeftButtonDown += OnMouseLeftButtonDown;
            this.MouseMove           += OnMouseMove;
            this.MouseLeftButtonUp   += OnMouseLeftButtonUp;


            MainDXViewportView.DXSceneDeviceCreated += OnDxSceneDeviceCreated;

            this.Loaded += delegate(object sender, RoutedEventArgs args)
            {
                DXView.GetDpiScale(this, out _dpiScaleX, out _dpiScaleY);
            };

            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                if (_disposables != null)
                {
                    _disposables.Dispose();
                    _disposables = null;
                }

                MainDXViewportView.Dispose();
            };
        }
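
As an illustration of the object-id-map technique described in the info text above (each object is rendered with a color that encodes its id, and the id is recovered from the pixel color read back from the bitmap), here is a small sketch of one possible id-to-color encoding; the exact encoding used by the sample is not shown here and may differ.

        // Sketch: encode a 32-bit object id into an ARGB color (used when rendering the id bitmap)
        // and decode it back from a pixel color that was read from that bitmap.
        private static System.Windows.Media.Color EncodeObjectId(int objectId)
        {
            return System.Windows.Media.Color.FromArgb(
                (byte)((objectId >> 24) & 0xFF),
                (byte)((objectId >> 16) & 0xFF),
                (byte)((objectId >> 8)  & 0xFF),
                (byte)( objectId        & 0xFF));
        }

        private static int DecodeObjectId(System.Windows.Media.Color pixelColor)
        {
            return (pixelColor.A << 24) | (pixelColor.R << 16) | (pixelColor.G << 8) | pixelColor.B;
        }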
Example #28
        public PlanarShadowsCustomization()
        {
            InitializeComponent();

            _disposables = new DisposeList();

            MainDXViewportView.DXSceneInitialized += delegate(object sender, EventArgs args)
            {
                if (MainDXViewportView.DXScene != null)
                {
                    // Load the texture file into a ShaderResourceView (in our case we load a jpg file, but we could also load a png or dds file)
                    string textureFileName          = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources/GrassTexture.jpg");
                    var    loadedShaderResourceView = Ab3d.DirectX.TextureLoader.LoadShaderResourceView(MainDXViewportView.DXScene.DXDevice.Device, textureFileName);

                    _disposables.Add(loadedShaderResourceView);

                    // Define DXEngine's materials for shadow plane
                    _shadowPlaneMaterial = new StandardMaterial()
                    {
                        DiffuseColor    = Color3.White, // When DiffuseTextures are set, then DiffuseColor is used as a color filter (White means no filter)
                        DiffuseTextures = new ShaderResourceView[] { loadedShaderResourceView }
                    };

                    _shadowPlaneBackMaterial = new StandardMaterial()
                    {
                        DiffuseColor = Colors.DimGray.ToColor3()
                    };

                    _disposables.Add(_shadowPlaneMaterial);
                    _disposables.Add(_shadowPlaneBackMaterial);


                    // Define the PlanarShadowRenderingProvider
                    _planarShadowRenderingProvider = new PlanarShadowRenderingProvider()
                    {
                        // We need to provide information about the position of the plane in 3D space
                        ShadowPlaneCenterPosition  = new Vector3(0, 0, 0),
                        ShadowPlaneSize            = new Vector2(400, 400),
                        ShadowPlaneNormalVector    = new Vector3(0, 1, 0),
                        ShadowPlaneHeightDirection = new Vector3(0, 0, -1),

                        // In case ShadowPlaneMaterial and/or ShadowPlaneBackMaterial are defined
                        // the PlanarShadowRenderingProvider will also render the 3D plane.
                        ShadowPlaneMaterial     = _shadowPlaneMaterial,
                        ShadowPlaneBackMaterial = _shadowPlaneBackMaterial,

                        // Set shadow properties
                        ShadowColor        = Color3.Black,
                        ShadowTransparency = (float)ShadowTransparencySlider.Value / 100.0f, // default value is 0.65f

                        // Because the shadow is rendered as a standard 3D object, we need to offset it from the shadow plane
                        // to prevent z-fighting problems that occur when two 3D objects are rendered at the same 3D position.
                        // This value needs to be very small so that it is not visible that the shadow is above the plane.
                        // The default value is 0.01f.
                        ShadowOffsetFromPlane = 0.01f,

                        // When using PlanarShadowRenderingProvider we do not need the PlanarShadowMeshCreator from Ab3d.PowerToys
                        // to prepare a special MeshGeometry3D for us. The shadow also does not need to be manually cut (on the CPU)
                        // to the plane bounds, because this can be done with a hardware-accelerated algorithm (using the stencil buffer).
                        // But if we still want to use PlanarShadowMeshCreator, we can set the following two properties to false
                        // (for example, if we wanted to use PlanarShadowRenderingProvider just to provide proper transparent shadows).
                        ApplyShadowMatrix      = true,
                        CutShadowToPlaneBounds = true,

                        IsCheckingIsCastingShadow = false, // Initially do not check for IsCastingShadow values (this is also the default value). See comments in LoadModel for more info.

                        //CustomShadowLight = new Ab3d.DirectX.Lights.DirectionalLight(new Vector3(0, -1, 1))
                        //CustomShadowLight = new Ab3d.DirectX.Lights.PointLight(new Vector3(0, 500, 0), 300)
                    };

                    _disposables.Add(_planarShadowRenderingProvider);


                    MainDXViewportView.DXScene.InitializeShadowRendering(_planarShadowRenderingProvider);
                }


                _lightHorizontalAngle = 30;
                _lightVerticalAngle   = 27;
                _lightDistance        = 500;

                _ambientLight = new AmbientLight(System.Windows.Media.Color.FromRgb(40, 40, 40));

                _shadowPointLight       = new PointLight();
                _shadowDirectionalLight = new DirectionalLight();

                Camera1.ShowCameraLight = ShowCameraLightType.Never; // prevent adding camera's light

                SetShadowLight(isDirectionalLight: true);

                UpdateLights();

                _loadedModel3D = LoadModel3D();
                MainViewport.Children.Add(_loadedModel3D.CreateModelVisual3D());
            };



            this.PreviewKeyDown += OnPreviewKeyDown;

            // This will allow receiving keyboard events
            this.Focusable = true;
            this.Focus();

            this.Unloaded += delegate(object sender, RoutedEventArgs args)
            {
                _disposables.Dispose();
                MainDXViewportView.Dispose();
            };
        }
Example #29
        private void Dispose()
        {
            _disposables.Dispose();
            _disposables = new DisposeList();
        }
Example #30
        public ManuallyCreatedSceneNodes()
        {
            InitializeComponent();


            // Wait until the DirectX device is created and then create the sample objects
            MainDXViewportView.DXSceneDeviceCreated += delegate(object sender, EventArgs args)
            {
                CreateScene();
                ResetCamera();
            };

            MainDXViewportView.SceneRendered += MainDXViewportViewOnSceneRendered;


            // Show 3D WireCrossVisual3D at the location of camera rotation
            MouseCameraController1.CameraRotateStarted += delegate(object sender, EventArgs args)
            {
                if (_wireCrossVisual3D != null)
                {
                    MainViewport.Children.Remove(_wireCrossVisual3D);
                    _wireCrossVisual3D = null;
                }

                Point3D rotationCenter;

                if (Camera1.RotationCenterPosition.HasValue)
                {
                    rotationCenter = Camera1.RotationCenterPosition.Value;
                }
                else
                {
                    rotationCenter = Camera1.TargetPosition;
                }


                _wireCrossVisual3D = new WireCrossVisual3D()
                {
                    LineColor     = Colors.Red,
                    LineThickness = 3,
                    LinesLength   = 100,
                    Position      = rotationCenter
                };

                MainViewport.Children.Add(_wireCrossVisual3D);
            };

            MouseCameraController1.CameraRotateEnded += delegate(object sender, EventArgs args)
            {
                if (_wireCrossVisual3D != null)
                {
                    MainViewport.Children.Remove(_wireCrossVisual3D);
                    _wireCrossVisual3D = null;
                }
            };


            this.Unloaded += delegate(object sender, RoutedEventArgs e)
            {
                if (_disposables != null)
                {
                    _disposables.Dispose();
                    _disposables = null;
                }

                MainDXViewportView.Dispose();
            };
        }