/// <summary>
/// Renders the viewport to a bitmap.
/// </summary>
/// <param name="view">The viewport.</param>
/// <param name="background">The background.</param>
/// <param name="m">The oversampling multiplier.</param>
/// <returns>A bitmap.</returns>
public static BitmapSource RenderBitmap(Viewport3D view, Brush background, int m = 1)
{
    var target = new WriteableBitmap((int)view.ActualWidth * m, (int)view.ActualHeight * m, 96, 96, PixelFormats.Pbgra32, null);

    var originalCamera = view.Camera;
    var vm = GetViewMatrix(originalCamera);
    double ar = view.ActualWidth / view.ActualHeight;

    for (int i = 0; i < m; i++)
    {
        for (int j = 0; j < m; j++)
        {
            // change the camera viewport and scaling
            var pm = GetProjectionMatrix(originalCamera, ar);
            if (originalCamera is OrthographicCamera)
            {
                pm.OffsetX = m - 1 - i * 2;
                pm.OffsetY = -(m - 1 - j * 2);
            }

            if (originalCamera is PerspectiveCamera)
            {
                pm.M31 = -(m - 1 - i * 2);
                pm.M32 = m - 1 - j * 2;
            }

            pm.M11 *= m;
            pm.M22 *= m;

            var mc = new MatrixCamera(vm, pm);
            view.Camera = mc;

            var partialBitmap = new RenderTargetBitmap((int)view.ActualWidth, (int)view.ActualHeight, 96, 96, PixelFormats.Pbgra32);

            // render background
            var backgroundRectangle = new Rectangle { Width = partialBitmap.Width, Height = partialBitmap.Height, Fill = background };
            backgroundRectangle.Arrange(new Rect(0, 0, backgroundRectangle.Width, backgroundRectangle.Height));
            partialBitmap.Render(backgroundRectangle);

            // render 3d
            partialBitmap.Render(view);

            // copy to the target bitmap
            CopyBitmap(partialBitmap, target, (int)(i * view.ActualWidth), (int)(j * view.ActualHeight));
        }
    }

    // restore the camera
    view.Camera = originalCamera;

    return target;
}
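// A minimal usage sketch for the helper above: capture an already laid-out Viewport3D at
// 2x oversampling and save the result as a PNG. The host class name (Viewport3DHelper) and
// the 'myViewport' variable are assumptions for illustration; requires System.IO,
// System.Windows.Media and System.Windows.Media.Imaging.
BitmapSource bitmap = Viewport3DHelper.RenderBitmap(myViewport, Brushes.White, m: 2);

var encoder = new PngBitmapEncoder();
encoder.Frames.Add(BitmapFrame.Create(bitmap));
using (var stream = File.Create("viewport.png"))
{
    encoder.Save(stream);
}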
/// <summary>
/// Computes the effective view matrix for the given camera: the inverse of the camera's
/// transform (via InverseCameraMatrix) combined with the camera's own view matrix,
/// i.e. the matrix representing the camera's viewpoint from its current position.
/// </summary>
public static Matrix3D ViewMatrix(Camera camera)
{
    Matrix3D inverseCamera = InverseCameraMatrix(camera);

    if (camera is MatrixCamera)
    {
        MatrixCamera c = camera as MatrixCamera;
        return Multiply(inverseCamera, c.ViewMatrix);
    }
    else if (camera is ProjectionCamera)
    {
        ProjectionCamera c = camera as ProjectionCamera;
        return Multiply(inverseCamera, MakeViewMatrix(c.Position, c.LookDirection, c.UpDirection));
    }

    throw new ArgumentException("Invalid camera specified " + camera.GetType());
}
/// <summary>
/// Computes the effective projection matrix for the given camera.
/// Note: the camera's FieldOfView (or Width) is passed for both of the last two arguments,
/// i.e. the same extent is used horizontally and vertically.
/// </summary>
public static Matrix3D ProjectionMatrix(Camera camera)
{
    if (camera is MatrixCamera)
    {
        MatrixCamera c = camera as MatrixCamera;
        return c.ProjectionMatrix;
    }
    else if (camera is PerspectiveCamera)
    {
        PerspectiveCamera c = camera as PerspectiveCamera;
        return MakePerspectiveProjection(c.NearPlaneDistance, c.FarPlaneDistance, c.FieldOfView, c.FieldOfView);
    }
    else if (camera is OrthographicCamera)
    {
        OrthographicCamera c = camera as OrthographicCamera;
        return MakeOrthographicProjection(c.NearPlaneDistance, c.FarPlaneDistance, c.Width, c.Width);
    }

    throw new ArgumentException("Invalid camera specified " + camera.GetType());
}
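// Sketch: the two helpers above can be combined to "freeze" any supported WPF camera into
// an equivalent MatrixCamera. The host class name (MathUtils) and the 'viewport' variable
// are assumptions for illustration only.
Camera original = viewport.Camera;
var frozenCamera = new MatrixCamera(
    MathUtils.ViewMatrix(original),
    MathUtils.ProjectionMatrix(original));
viewport.Camera = frozenCamera;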
private static Ray3D RayFromMatrixCameraPoint(MatrixCamera camera, Point point, Size viewSize)
{
    Point normalizedPoint = CameraRayHelpers.GetNormalizedPoint(point, viewSize);

    Matrix3D matrix3D = camera.ViewMatrix * camera.ProjectionMatrix;
    if (!matrix3D.HasInverse)
    {
        throw new NotSupportedException(ExceptionStringTable.NeedToHandleSingularMatrixCameras);
    }

    matrix3D.Invert();

    // Un-project the normalized point on the near plane (z = 0) back into world space.
    Point4D point4D = new Point4D(normalizedPoint.X, normalizedPoint.Y, 0.0, 1.0) * matrix3D;
    Point3D origin = new Point3D(point4D.X / point4D.W, point4D.Y / point4D.W, point4D.Z / point4D.W);

    // The ray direction is the change of the un-projected point with respect to the
    // normalized depth, taken from the third row of the inverted matrix.
    Vector3D direction = new Vector3D(
        matrix3D.M31 - matrix3D.M34 * origin.X,
        matrix3D.M32 - matrix3D.M34 * origin.Y,
        matrix3D.M33 - matrix3D.M34 * origin.Z);
    direction.Normalize();

    if (point4D.W < 0.0)
    {
        direction = -direction;
    }

    return new Ray3D(origin, direction);
}
/// <summary>
/// Computes the effective view matrix for the given camera.
/// </summary>
public static Matrix3D GetViewMatrix(Camera camera)
{
    if (camera == null)
    {
        throw new ArgumentNullException("camera");
    }

    ProjectionCamera projectionCamera = camera as ProjectionCamera;
    if (projectionCamera != null)
    {
        return GetViewMatrix(projectionCamera);
    }

    MatrixCamera matrixCamera = camera as MatrixCamera;
    if (matrixCamera != null)
    {
        return matrixCamera.ViewMatrix;
    }

    throw new ArgumentException(String.Format("Unsupported camera type '{0}'.", camera.GetType().FullName), "camera");
}
/// <summary>
/// Starts execution of the selected macro
/// </summary>
private void StartButton_Click(object sender, EventArgs e)
{
    #region cheat sheet
    // Reference for virtual key codes:
    // https://docs.microsoft.com/ru-ru/windows/desktop/inputdev/virtual-key-codes
    //SendKeys.Send("");

    // cheat sheet:
    //keybd_event((byte)VkKeyScan('D')/*D*/, 1/*???*/, KEYEVENTF_KEYUP, 0);
    //System.Threading.Thread.Sleep(5000);
    //keybd_event(0x44/*D*/, 1/*???*/, KEYEVENTF_EXTENDEDKEY, 0);
    //System.Threading.Thread.Sleep(1000);
    //keybd_event(0x44/*D*/, 1/*???*/, KEYEVENTF_KEYUP, 0);
    #endregion

    if (_currentMacros == null)
    {
        pickMacrosErrorLabel.Visible = true;
        return;
    }

    _secondsBeforeStart = string.IsNullOrWhiteSpace(startSecTextBox.Text) ? 0 : int.Parse(startSecTextBox.Text);
    Thread.Sleep(_secondsBeforeStart * Command._microConvert * 10);

    switch (_currentMacros.Name)
    {
        case "Фидер Комариное Караси":
            RunFiderKomarinoeMacros();
            return;

        case "Фидер Острог Караси":
            RunFiderOstrogMacros();
            return;

        case "Копать":
            RunKopatMacros();
            return;

        default:
            RunMacros(_currentMacros);

            #region key press tracking
            //keyboardScaner.Start();
            #endregion
            break;
    }

    //#region TEST
    //Camera Cam = new MatrixCamera();   // camera
    //float maxY;                        // Y-axis limit
    //float speed;                       // camera movement speed
    //Cam.transform.position = new Vector3(transform.position.x,
    //    transform.position.y - speed, transform.position.z);
    //if (Cam.transform.position.y < maxY)   // Y-axis limit
    //{
    //    Cam.transform.position = new Vector3(transform.position.x, maxY, transform.position.z);
    //}
    //#endregion

    Camera cam = new MatrixCamera();
}
/// <summary>
/// CreateResources is called when the VirtualRealityProvider is initialized and should create the DirectX resources.
/// </summary>
/// <remarks>
/// <para>
/// <b>CreateResources</b> is called when the VirtualRealityProvider is initialized and should create the DirectX resources.
/// </para>
/// <para>
/// This method is called after this virtual reality provider is registered by calling the <see cref="DXScene.InitializeVirtualRealityRendering"/> method.
/// That method calls <see cref="DXSceneResource.InitializeResources"/> and <see cref="VirtualRealityProviderBase.OnInitializeResources"/>.
/// OnInitializeResources calls this CreateResources method and then the <see cref="VirtualRealityProviderBase.InitializeRenderingSteps"/> method.
/// </para>
/// <para>
/// This method usually creates pixel shaders and constant buffers.
/// Other resources (back buffers and views) are usually created in <see cref="VirtualRealityProviderBase.UpdateRenderingContext"/>, where the size of the current back buffer is compared with the size of the back buffers for virtual reality.
/// </para>
/// </remarks>
/// <param name="dxScene">parent DXScene</param>
protected override void CreateResources(DXScene dxScene)
{
    if (_eyeTextureSwapChains != null)
    {
        if (_eyeTextureSwapChains[0] != null)
        {
            _eyeTextureSwapChains[0].Dispose();
        }

        if (_eyeTextureSwapChains[1] != null)
        {
            _eyeTextureSwapChains[1].Dispose();
        }
    }
    else
    {
        _eyeTextureSwapChains = new OculusTextureSwapChain[2];
    }

    _eyeTextureSwapChains[0] = new OculusTextureSwapChain(_ovr, _sessionPtr, dxScene.Device, EyeType.Left, Format.B8G8R8A8_UNorm_SRgb,
        _ovr.GetFovTextureSize(_sessionPtr, EyeType.Left, _hmdDesc.DefaultEyeFov[0], 1.0f),
        createDepthStencilView: true,
        isDebugDevice: dxScene.DXDevice.IsDebugDevice);

    _eyeTextureSwapChains[1] = new OculusTextureSwapChain(_ovr, _sessionPtr, dxScene.Device, EyeType.Right, Format.B8G8R8A8_UNorm_SRgb,
        _ovr.GetFovTextureSize(_sessionPtr, EyeType.Right, _hmdDesc.DefaultEyeFov[1], 1.0f),
        createDepthStencilView: true,
        isDebugDevice: dxScene.DXDevice.IsDebugDevice);

    _layerShared = new LayerEyeFov();
    _layerShared.Header = new LayerHeader()
    {
        Type = LayerType.EyeFov,
        Flags = LayerFlags.HighQuality
    };

    // Specify the texture to show on the HMD.
    _layerShared.ColorTextureLeft = _eyeTextureSwapChains[0].TextureSwapChainPtr;
    _layerShared.ColorTextureRight = _eyeTextureSwapChains[1].TextureSwapChainPtr;
    _layerShared.ViewportLeft.Position = new Vector2i(0, 0);
    _layerShared.ViewportLeft.Size = _eyeTextureSwapChains[0].Size;
    _layerShared.ViewportRight.Position = new Vector2i(0, 0);
    _layerShared.ViewportRight.Size = _eyeTextureSwapChains[1].Size;
    _layerShared.FovLeft = _hmdDesc.DefaultEyeFov[0];
    _layerShared.FovRight = _hmdDesc.DefaultEyeFov[1];

    _eyeRenderDesc[0] = _ovr.GetRenderDesc(_sessionPtr, EyeType.Left, _hmdDesc.DefaultEyeFov[0]);
    _hmdToEyeOffset[0] = _eyeRenderDesc[0].HmdToEyePose.Position;

    _eyeRenderDesc[1] = _ovr.GetRenderDesc(_sessionPtr, EyeType.Right, _hmdDesc.DefaultEyeFov[1]);
    _hmdToEyeOffset[1] = _eyeRenderDesc[1].HmdToEyePose.Position;

    // Create MSAA back buffer if needed
    UpdateMsaaBackBuffer(_eyeTextureSwapChains[0].Size.Width, _eyeTextureSwapChains[0].Size.Height, _multisamplingCount);

    _mirrorTextureDesc = new MirrorTextureDesc()
    {
        Format = SharpDXHelpers.GetTextureFormat(dxScene.BackBufferDescription.Format),
        Height = dxScene.BackBufferDescription.Height,
        MiscFlags = dxScene.BackBufferDescription.MipLevels != 1 ? TextureMiscFlags.AllowGenerateMips : TextureMiscFlags.None,
        Width = dxScene.BackBufferDescription.Width
    };

    // EyeLevel tracking origin: poses are reported relative to the headset's initial position
    // (FloorLevel would give tracking poses where the floor height is 0).
    _ovr.SetTrackingOriginType(_sessionPtr, TrackingOrigin.EyeLevel);

    IntPtr mirrorTexturePtr;
    var result = _ovr.CreateMirrorTextureDX(_sessionPtr, dxScene.Device.NativePointer, ref _mirrorTextureDesc, out mirrorTexturePtr);
    if (result < Ab3d.OculusWrap.Result.Success)
    {
        var lastError = _ovr.GetLastErrorInfo();
        throw new OvrException("Failed to create Oculus mirror texture: " + lastError.ErrorString, lastError.Result);
    }

    _mirrorTexture = new OculusMirrorTexture(_ovr, _sessionPtr, mirrorTexturePtr);

    // Retrieve the Direct3D texture contained in the Oculus MirrorTexture.
    IntPtr mirrorTextureComPtr;
    result = _mirrorTexture.GetBufferDX(typeof(Texture2D).GUID, out mirrorTextureComPtr);
    if (result < Ab3d.OculusWrap.Result.Success)
    {
        var lastError = _ovr.GetLastErrorInfo();
        throw new OvrException("Failed to retrieve the texture from the created mirror texture buffer: " + lastError.ErrorString, lastError.Result);
    }

    // Create a managed Texture2D, based on the unmanaged texture pointer.
    _mirrorTextureDX = new Texture2D(mirrorTextureComPtr);

    if (dxScene.DXDevice.IsDebugDevice)
    {
        _mirrorTextureDX.DebugName = "OculusMirrorTexture";
    }

    // To prevent DirectX from rendering more than one frame in the background,
    // we need to set the MaximumFrameLatency to 1.
    // This prevents occasional dropped frames in Oculus Rift.
    var dxgiDevice = dxScene.Device.QueryInterface<SharpDX.DXGI.Device1>();
    if (dxgiDevice != null)
    {
        dxgiDevice.MaximumFrameLatency = 1;
        dxgiDevice.Dispose();
    }

    _frameIndex = 0;

    _matrixCamera = new MatrixCamera();
}
/// <summary>
/// Get display matrix
/// </summary>
/// <param name="camera">Camera</param>
/// <param name="width">Viewport width</param>
/// <param name="height">Viewport height</param>
/// <returns>Display matrix</returns>
public static Matrix3D GetDisplayMatrix(MatrixCamera camera, double width, double height)
{
    return (camera.ViewMatrix * camera.ProjectionMatrix) * GetHomogeneousToViewportTransform3D(width, height);
}
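// Sketch: using the display matrix to project a world-space point into viewport (pixel)
// coordinates. 'matrixCamera' and the 'viewport' size are illustrative values; WPF's
// Matrix3D.Transform(Point3D) performs the homogeneous divide for non-affine matrices.
Matrix3D displayMatrix = GetDisplayMatrix(matrixCamera, viewport.ActualWidth, viewport.ActualHeight);

Point3D worldPoint = new Point3D(0, 0, 0);
Point3D screenPoint = displayMatrix.Transform(worldPoint);
// screenPoint.X / screenPoint.Y are now viewport coordinates;
// screenPoint.Z can still be used for depth sorting.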
/// <summary>
/// Rendering
/// </summary>
/// <param name="sender">Sender</param>
/// <param name="e">Event arguments</param>
void CompositionTarget_Rendering(object sender, EventArgs e)
{
    Quaternion q = Quaternion.Identity;

    if (Keyboard.IsKeyDown(Key.Left))
    {
        q *= new Quaternion(new Vector3D(0, 3, 0), 5);
    }

    if (Keyboard.IsKeyDown(Key.Right))
    {
        q *= new Quaternion(new Vector3D(0, 3, 0), -5);
    }

    if (Keyboard.IsKeyDown(Key.Up))
    {
        q *= new Quaternion(new Vector3D(3, 0, 0), 5);
    }

    if (Keyboard.IsKeyDown(Key.Down))
    {
        q *= new Quaternion(new Vector3D(3, 0, 0), -5);
    }

    /*
     * Vector3D l = camera.Transform.Transform( camera.LookDirection );
     * Matrix3D rotateLeft = new Matrix3D();
     * rotateLeft.Rotate( new Quaternion( new Vector3D( 0, 3, 0 ), 90 ) );
     *
     * if ( Keyboard.IsKeyDown( Key.Up ) ) FPSCamera( l * 0.05 );
     * if ( Keyboard.IsKeyDown( Key.Down ) ) FPSCamera( -l * 0.05 );
     * if ( Keyboard.IsKeyDown( Key.Left ) ) FPSCamera( rotateLeft.Transform( l ) * 0.05 );
     * if ( Keyboard.IsKeyDown( Key.Right ) ) FPSCamera( -rotateLeft.Transform( l ) * 0.05 );
     */

    if (!q.IsIdentity)
    {
        AxisAngleRotation3D rotation = trackball.Rotation;
        Quaternion newQ = new Quaternion(rotation.Axis, rotation.Angle) * q;
        rotation.Axis = newQ.Axis;
        rotation.Angle = newQ.Angle;
    }

    if (cameraChanged && tree != null)
    {
        cameraChanged = false;

        MatrixCamera matrixCamera = FactCubeCamera.CreateMatrixCamera(camera, ActualWidth, ActualHeight, 0.01, 3);
        Matrix3D viewProjectionMatrix = matrixCamera.ViewMatrix * matrixCamera.ProjectionMatrix;
        Frustum frustum = new Frustum(viewProjectionMatrix, true);
        DepthPlanes depthPlanes = new DepthPlanes(camera, tree.Bounds, 1d / (dimensionSize - 1));
        Point3D cameraPos = camera.Transform.Transform(camera.Position);

        List<IBSPItem> items = new List<IBSPItem>();
        RenderTree(tree, items, cameraPos, frustum, depthPlanes);

        Matrix3D displayMatrix = viewProjectionMatrix * FactCubeCamera.GetHomogeneousToViewportTransform3D(ActualWidth, ActualHeight);

        //canvas.Children.Clear();
        Render3D(tree, items, cameraPos, displayMatrix, frustum, depthPlanes);
    }
}
public void Create3D(Viewport3D vp)
{
    this.vp = vp;
    vp.IsHitTestVisible = false;
    vp.ClipToBounds = false;
    RenderOptions.SetEdgeMode(vp, EdgeMode.Aliased);

    things3d = new List<Thing3D>();
    things3d.Add(new Thing3D("directional light", new DirectionalLight(Colors.White, new Vector3D(1.1, -1.1, -1.1))));
    things3d.Add(new Thing3D("directional light", new DirectionalLight(Colors.White, new Vector3D(-1.1, 0.1, 1.1))));
    things3d.Add(new Thing3D("directional light", new DirectionalLight(Colors.White, new Vector3D(1.1, 0.1, -1.1))));
    things3d.Add(new Thing3D("directional light", new DirectionalLight(Colors.White, new Vector3D(-1, 0, -1))));
    things3d.Add(new Thing3D("directional light", new AmbientLight(Color.FromRgb(55, 55, 55))));

    /* Matrix3D element layout:
     * [ m11     m12     m13     m14 ]
     * [ m21     m22     m23     m24 ]
     * [ m31     m32     m33     m34 ]
     * [ offsetX offsetY offsetZ m44 ]
     */
    MatrixCamera matrix_camera = new MatrixCamera();
    view_matrix = Matrix3D.Parse("0.539750657951516,0.559641738748834,-0.62886433472657,0,7.37257477290143E-18,0.747025071240996,0.66479586561394,0,0.841824938595261,-0.358824005868569,0.403207273708604,0,264.62356046239,-435.674414360455,-1350.56665818377,1");
    vp.Camera = new MatrixCamera(view_matrix, perspective_matrix);
    update_perspective_matrix();
    vp.SizeChanged += new SizeChangedEventHandler(vp_SizeChanged);

    my_node_grid = new node_grid(new Point3D(0, 1500, 0), 500, 500, 20, 20, 1, 5000000, 1500, 2500000, 1000, 3000000, 1000); // MINIMUM AXIAL VERTEX DENSITY ~= 500 / 20 = 25 units / node_count

    my_box = new box(new Point3D(0, 300, 1000), 200, 100, 50, .3, 0.01);
    my_box2 = new box(new Point3D(0, 300, -400), 150, 20, 100, .3, 0.01);
    // my_sphere = new sphere(new Point3D(300, 200, 0), 100, 40, 20, 5.0, 0.01, mg);
    my_sphere = new sphere(new Point3D(0, 500, 2), 200, 40, 20, 10.0, .01, new DiffuseMaterial(MakeSphereGrid()));

    ModelVisual3D mv3d2 = new ModelVisual3D();
    mv3d2.Content = my_sphere.all_models;
    vp.Children.Add(mv3d2);

    //things3d.Add();
    things3d.Add(new Thing3D("box", my_box.model));
    things3d.Add(new Thing3D("box", my_box2.model));
    things3d.Add(new Thing3D("mesh", my_node_grid.model));

    int x = -750;
    things3d.Add(new Thing3D("floor", GenerateFloorMesh(MakeGrid(), x)));

    for (int n = 0; n < things3d.Count; n++)
    {
        ModelVisual3D mv3d = new ModelVisual3D();
        mv3d.Content = things3d[n].model;
        vp.Children.Add(mv3d);
    }

    CompositionTarget.Rendering += new EventHandler(Update);
}
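// The snippet above calls update_perspective_matrix(), which is not shown. Below is a
// hypothetical sketch of what such a method might compute: a standard right-handed
// perspective projection in WPF's row-vector convention, rebuilt from the viewport size.
// The field-of-view and near/far plane values are assumptions, not the original values.
void update_perspective_matrix()
{
    double hFov = 45 * Math.PI / 180;                     // assumed horizontal field of view (radians)
    double aspect = vp.ActualWidth / vp.ActualHeight;     // width / height
    double zn = 1.0, zf = 10000.0;                        // assumed near / far plane distances

    double xScale = 1 / Math.Tan(hFov / 2);
    double yScale = xScale * aspect;

    perspective_matrix = new Matrix3D(
        xScale, 0,      0,                   0,
        0,      yScale, 0,                   0,
        0,      0,      zf / (zn - zf),     -1,
        0,      0,      zn * zf / (zn - zf), 0);

    vp.Camera = new MatrixCamera(view_matrix, perspective_matrix);
}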