/// <summary>
/// Caches the shared device resources and physical camera, then kicks off
/// asynchronous creation of the renderer's device-dependent resources.
/// </summary>
/// <param name="deviceResources">Shared D3D device resources.</param>
/// <param name="physicalCamera">Camera providing frames for this test renderer.</param>
public CameraTestRenderer(DeviceResources deviceResources, PhysicalCamera physicalCamera)
{
    this.deviceResources = deviceResources;
    this.physicalCamera = physicalCamera;

    // Fire-and-forget: discard the Task explicitly so the intent is clear and
    // the CS4014 "call is not awaited" warning is suppressed deliberately.
    // NOTE(review): any exception thrown during resource creation is unobserved
    // here — consider storing the Task in a field and awaiting/inspecting it
    // before first render.
    _ = CreateDeviceDependentResourcesAsync();
}
/// <summary>
/// Creates the renderer in an inactive, un-paused state with no cube-map update
/// pending, and hooks the camera's steady-frame event.
/// </summary>
/// <param name="deviceResources">Shared D3D device resources.</param>
/// <param name="physicalCamera">Camera whose frames drive cube-map updates.</param>
public CubemapMeshRenderer(DeviceResources deviceResources, PhysicalCamera physicalCamera)
{
    DeviceResources = deviceResources;
    PhysicalCamera = physicalCamera;

    // Start fully idle; rendering is enabled elsewhere.
    Active = false;
    Paused = false;
    CubeMapUpdateRequired = false;

    // React to each steady frame delivered by the physical camera.
    PhysicalCamera.FrameUpdated += OnSteadyFrameAvailable;
}
/// <summary>
/// Sets up the texturer with a pair of disposable mesh texture sets at the
/// requested resolution, then builds the device-dependent GPU resources.
/// </summary>
/// <param name="resources">Shared D3D device resources.</param>
/// <param name="camera">Camera supplying imagery for texturing.</param>
/// <param name="resolution">Edge length of the mesh textures, in pixels.</param>
public MeshTexturer(DeviceResources resources, PhysicalCamera camera, int resolution)
{
    Resources = resources;
    Camera = camera;
    Resolution = resolution;
    Active = false;

    // Two texture sets, indexed by CurrentTexture — presumably so one set can be
    // updated while the other is displayed (double-buffering); TODO confirm.
    var front = ToDispose(new MeshTextureSet(resources, Resolution));
    var back = ToDispose(new MeshTextureSet(resources, Resolution));
    MeshTextures = new[] { front, back };
    CurrentTexture = 0;

    CreateDeviceDependantResources();
}
/// <summary>
/// Wires up the main rendering pipeline: texture debugging, mesh texturing and
/// mesh rendering, camera-driven projection requests, and speech commands.
/// </summary>
/// <param name="resources">Shared D3D device resources.</param>
/// <param name="camera">Camera whose frames trigger mesh re-projection.</param>
public MainRenderer(DeviceResources resources, PhysicalCamera camera)
{
    Resources = resources;
    Camera = camera;

    // All state/request flags begin cleared.
    Active = false;
    Debug = false;
    ProjectionRequested = false;
    UpdateRequested = false;
    ExportRequested = false;

    TextureDebugger = new TextureDebugRenderer(resources);
    MeshTexturer = new MeshTexturer(resources, camera, Resolution);
    MeshRenderer = new MeshRenderer(resources);

    // Queue a mesh projection whenever the camera delivers a fresh frame.
    Camera.FrameUpdated += RequestMeshProjection;

    SetupSpeechRecognition();
}
// Runs one step of the chessboard-based camera calibration pipeline.
// Three phases, selected by current state:
//   1. intrinsics known  -> solve extrinsics for this frame and localize the camera;
//   2. still collecting  -> record this frame's corners toward the intrinsic solve;
//   3. enough frames     -> run the full intrinsic calibration.
// Does nothing if no chessboard corners are found in the picture.
private void Calibrate(Picture picture, string uniqueSourceId, PhysicalCamera camera)
{
    var corners = FindChessBoardCorners(picture, uniqueSourceId);
    if (corners == null) return;
    if (intrinsics != null)
    {
        // Intrinsics already solved: recover this frame's camera pose (extrinsics)
        // from the known 3D object points and the detected 2D corners.
        extrinsics = CameraCalibration.FindExtrinsicCameraParams2(Projector.GetObjectPointsCopy(), corners, intrinsics);
        var ecp = extrinsics;
        // Build a 4x4 world matrix from the 3x4 extrinsic matrix. Note the matrix is
        // transposed (OpenCV rows become columns here) and the 2nd and 3rd rows are
        // negated — presumably converting OpenCV's y-down/z-forward camera convention
        // into this renderer's coordinate handedness; TODO confirm against the
        // rendering coordinate system.
        var matrix = new Matrix((float)ecp.ExtrinsicMatrix[0, 0], -(float)ecp.ExtrinsicMatrix[1, 0], -(float)ecp.ExtrinsicMatrix[2, 0], 0, (float)ecp.ExtrinsicMatrix[0, 1], -(float)ecp.ExtrinsicMatrix[1, 1], -(float)ecp.ExtrinsicMatrix[2, 1], 0, (float)ecp.ExtrinsicMatrix[0, 2], -(float)ecp.ExtrinsicMatrix[1, 2], -(float)ecp.ExtrinsicMatrix[2, 2], 0, (float)ecp.ExtrinsicMatrix[0, 3], -(float)ecp.ExtrinsicMatrix[1, 3], -(float)ecp.ExtrinsicMatrix[2, 3], 1);
        // The extrinsic matrix maps world -> camera; invert to get the camera's
        // world transform.
        camera.World = Matrix.Invert(matrix);
        camera.Localized = true;
        double fovx;
        double fovy;
        double focalLength;
        MCvPoint2D64f principalPoint;
        double pixelAspectRatio;
        // Aperture width/height passed as 0: fov values come back in degrees based
        // on pixel units only.
        intrinsics.GetIntrinsicMatrixValues(picture.Width, picture.Height, 0, 0, out fovx, out fovy, out focalLength, out principalPoint, out pixelAspectRatio);
        // Rebuild the projection from the measured vertical FOV; near/far planes of
        // 0.1–2.0 (NOTE(review): units presumably meters — confirm).
        camera.Projection = Matrix.CreatePerspectiveFieldOfView(MathHelper.ToRadians((float)fovy), ((float)picture.Width) / ((float)picture.Height), 0.1f, 2.0f);
    }
    else if (imageCorners.Count < numTakes)
    {
        // Still collecting calibration views: overlay the detected corners on the
        // image for feedback, store them, and hand the picture buffer to the
        // camera's debug slot (ExchangeAndFree takes ownership of 'picture').
        CvInvoke.cvDrawChessboardCorners(picture.Bgra.Ptr, this.Dim, corners, corners.Length, patternWasFound: 1);
        imageCorners.Add(corners);
        Picture.ExchangeAndFree(ref camera.calib2E, ref picture);
    }
    else
    {
        // Enough views collected: run the full intrinsic calibration.
        CalibrateWithChessBoardCorners(picture, uniqueSourceId);
    }
}
// One-time setup against the app's HolographicSpace: creates the sample content
// (when DRAW_SAMPLE_CONTENT is defined), initializes the physical camera and the
// main renderer, registers spatial-tracking and camera lifecycle handlers, and
// establishes the stationary world reference frame. Registration order matters:
// CameraAdded must be hooked before any HolographicFrame is created (see below).
public void SetHolographicSpace(HolographicSpace holographicSpace)
{
    this.holographicSpace = holographicSpace;

    //
    // TODO: Add code here to initialize your content.
    //
#if DRAW_SAMPLE_CONTENT
    // Initialize the sample hologram.
    spinningCubeRenderer = new SpinningCubeRenderer(deviceResources);

    physicalCamera = new PhysicalCamera(deviceResources.D3DDevice, false);
    physicalCamera.Initialize();
    // cameraTestRenderer = new CameraTestRenderer(deviceResources, physicalCamera);
    // meshTestRenderer = new MeshTestRenderer(deviceResources, physicalCamera);
    meshCollectionTexturer = new MainRenderer(deviceResources, physicalCamera);

    spatialInputHandler = new SpatialInputHandler();
#endif

    // Use the default SpatialLocator to track the motion of the device.
    locator = SpatialLocator.GetDefault();

    // Be able to respond to changes in the positional tracking state.
    locator.LocatabilityChanged += OnLocatabilityChanged;

    // Respond to camera added events by creating any resources that are specific
    // to that camera, such as the back buffer render target view.
    // When we add an event handler for CameraAdded, the API layer will avoid putting
    // the new camera in new HolographicFrames until we complete the deferral we created
    // for that handler, or return from the handler without creating a deferral. This
    // allows the app to take more than one frame to finish creating resources and
    // loading assets for the new holographic camera.
    // This function should be registered before the app creates any HolographicFrames.
    holographicSpace.CameraAdded += OnCameraAdded;

    // Respond to camera removed events by releasing resources that were created for that
    // camera.
    // When the app receives a CameraRemoved event, it releases all references to the back
    // buffer right away. This includes render target views, Direct2D target bitmaps, and so on.
    // The app must also ensure that the back buffer is not attached as a render target, as
    // shown in DeviceResources.ReleaseResourcesForBackBuffer.
    holographicSpace.CameraRemoved += OnCameraRemoved;

    // The simplest way to render world-locked holograms is to create a stationary reference frame
    // when the app is launched. This is roughly analogous to creating a "world" coordinate system
    // with the origin placed at the device's position as the app is launched.
    referenceFrame = locator.CreateStationaryFrameOfReferenceAtCurrentLocation();

    //meshTestRenderer.Initialize(referenceFrame.CoordinateSystem);
    meshCollectionTexturer.Initialize(referenceFrame.CoordinateSystem);

    // Notes on spatial tracking APIs:
    // * Stationary reference frames are designed to provide a best-fit position relative to the
    //   overall space. Individual positions within that reference frame are allowed to drift slightly
    //   as the device learns more about the environment.
    // * When precise placement of individual holograms is required, a SpatialAnchor should be used to
    //   anchor the individual hologram to a position in the real world - for example, a point the user
    //   indicates to be of special interest. Anchor positions do not drift, but can be corrected; the
    //   anchor will use the corrected position starting in the next frame after the correction has
    //   occurred.
}