// Example #1
 /// <summary>
 /// Called when a ray-casted view of the reconstruction is available for display in the UI.
 /// </summary>
 /// <param name="sensor">The sensor whose viewpoint produced the frame, or null when the graphics camera pose was used.</param>
 private void ReconstructFrameComplete(ReconstructionSensor sensor)
 {
     // Select the shaded surface frame (or the normals variant when displayNormals
     // is set) from the graphics camera when no sensor is given, otherwise from
     // that sensor's own frames.
     var frameToRender = sensor == null
         ? (this.displayNormals ? this.ShadedSurfaceNormalsFrame : this.ShadedSurfaceFrame)
         : (this.displayNormals ? sensor.ShadedSurfaceNormalsFrame : sensor.ShadedSurfaceFrame);

     // Render shaded surface frame or shaded surface normals frame
     RenderColorImage(
         frameToRender,
         ref this.shadedSurfaceFramePixelsArgb,
         ref this.shadedSurfaceFrameBitmap,
         this.shadedSurfaceImage);
 }
// Example #2
        /// <summary>
        /// Turn sensor Laser emitter on or off, silently ignoring any errors.
        /// Note that errors will occur if a Kinect for Xbox sensor is attached as this
        /// does not support controlling the Laser emitter.
        /// </summary>
        /// <param name="sensor">The reconstruction sensor.</param>
        /// <param name="forceOff">Set true to turn off the sensor, false to turn sensor on.</param>
        private void ChangeSensorEmitterState(ReconstructionSensor sensor, bool forceOff)
        {
            if (null != sensor && null != sensor.Sensor)
            {
                try
                {
                    // BUG FIX: this assignment was commented out, which made the method
                    // a no-op despite its documented contract. The same property is
                    // written elsewhere in this file when the depth stream is started.
                    sensor.Sensor.ForceInfraredEmitterOff = forceOff;
                }
                catch (InvalidOperationException)
                {
                    // Kinect for Xbox hardware does not support emitter control; fail silently.
                }
            }
        }
// Example #3
        /// <summary>
        /// Render the reconstruction, optionally from the sensor camera viewpoint.
        /// </summary>
        /// <param name="sensor">Optionally, the sensor to use for the reconstruction rendering viewpoint, or null to use the graphics camera pose.</param>
        private void RenderReconstruction(ReconstructionSensor sensor)
        {
            try
            {
                // Choose the viewpoint: virtual (graphics) camera by default,
                // otherwise the supplied sensor's reconstruction camera.
                Matrix4 worldToCamera = (sensor == null)
                    ? this.virtualCamera.WorldToCameraMatrix4
                    : sensor.ReconCamera.WorldToCameraMatrix4;

                // Bail out unless the volume and all required frames are available
                // and we are not in the middle of saving a mesh.
                bool readyToRender = null != this.volume
                    && !this.savingMesh
                    && null != this.PointCloudFrame
                    && null != this.ShadedSurfaceFrame
                    && null != this.ShadedSurfaceNormalsFrame;

                if (!readyToRender)
                {
                    return;
                }

                // Lock the volume operations
                lock (this.reconstructionLock)
                {
                    // Color counts as "in use" when any active sensor captures it.
                    bool anyActiveColorSensor = false;
                    foreach (ReconstructionSensor candidate in this.sensors)
                    {
                        if (candidate.UseSensor && candidate.CaptureColor)
                        {
                            anyActiveColorSensor = true;
                            break;
                        }
                    }

                    if (this.colorCaptured && anyActiveColorSensor)
                    {
                        // Point cloud shaded with the integrated color.
                        this.volume.CalculatePointCloud(this.PointCloudFrame, this.ShadedSurfaceFrame, worldToCamera);
                    }
                    else
                    {
                        this.volume.CalculatePointCloud(this.PointCloudFrame, worldToCamera);

                        // Shade point cloud frame for rendering
                        FusionDepthProcessor.ShadePointCloud(
                            this.PointCloudFrame, worldToCamera, this.ShadedSurfaceFrame, this.ShadedSurfaceNormalsFrame);
                    }
                }

                // Run the UI update
                Dispatcher.BeginInvoke((Action)(() => this.ReconstructFrameComplete(sensor)));
            }
            catch (InvalidOperationException ex)
            {
                this.ShowStatusMessage(ex.Message);
            }
        }
// Example #4
        /// <summary>
        /// Called when a depth frame is available for display in the UI.
        /// </summary>
        /// <param name="sensor">The sensor whose depth frame is ready.</param>
        private void DepthFrameComplete(ReconstructionSensor sensor)
        {
            // On the very first frame, draw the shaded surface (still blank at this
            // point) so the reconstruction pane shows something before integration.
            if (this.firstFrame)
            {
                this.firstFrame = false;

                RenderColorImage(
                    this.sensors[0].ShadedSurfaceFrame,
                    ref this.shadedSurfaceFramePixelsArgb,
                    ref this.shadedSurfaceFrameBitmap,
                    this.shadedSurfaceImage);
            }

            // Draw the current depth float frame.
            this.RenderDepthFloatImage(sensor.DepthFloatFrame, ref this.depthFloatFrameBitmap, this.depthFloatImage);
        }
// Example #5
        /// <summary>
        /// Align the latest depth frame to the volume, update the camera pose and the
        /// integration weight, and — while the track is accurate — export a volume
        /// block and compute the occupied-voxel total shown in the UI.
        /// </summary>
        /// <param name="sensor">The sensor whose depth frame is processed.</param>
        private void doDepthStuff(ReconstructionSensor sensor)
        {
            // Number of align (ICP) iterations requested from ProcessFrame;
            // was an inline magic number.
            const int AlignIterationCount = 7;

            // 128^3 voxels = 2097152; must agree with the extents passed to
            // ExportVolumeBlock below (previously inline magic numbers).
            const int VoxelsPerAxis = 128;
            const int VoxelCount = VoxelsPerAxis * VoxelsPerAxis * VoxelsPerAxis;

            tracked = this.volume.ProcessFrame(
                sensor.DepthFloatFrame,
                AlignIterationCount,
                this.integrationWeight,
                sensor.ReconCamera.WorldToCameraMatrix4);

            if (tracked)
            {
                this.CameraChangeMatrix4 = this.volume.GetCurrentWorldToCameraTransform();

                // BUG FIX: the original "x = x++" is a no-op in C# (the post-increment
                // value is overwritten), so the weight never changed. Raise it towards
                // its cap while tracking succeeds.
                this.integrationWeight++;
                if (this.integrationWeight > 1000)
                {
                    this.integrationWeight = 1000;
                }

                if (Math.Abs(accuracyOfTrack) < 0.1)
                {
                    sensor.ReconCamera.UpdateFrustumTransformMatrix4(CameraChangeMatrix4);

                    short[] volumeBlock = new short[VoxelCount];
                    int[] colourVolumeBlock = new int[VoxelCount];

                    this.volume.ExportVolumeBlock(
                        0, 0, 0, VoxelsPerAxis, VoxelsPerAxis, VoxelsPerAxis, 1, volumeBlock, colourVolumeBlock);

                    // Count allocated voxels on the GPU: mark allocated units, then
                    // reduce-sum them into calcVolume.
                    int calcVolume = 0;
                    short[] allocatedVoxel = new short[VoxelCount];
                    int[] allocatedColourVoxel = new int[VoxelCount];
                    unsafe
                    {
                        fixed (int* volumeBlockPtr = &colourVolumeBlock[0])
                        {
                            fixed (int* allocatedVoxelPtr = &allocatedColourVoxel[0])
                            {
                                findAllocatedUnits(volumeBlockPtr, volumeBlock.Length, allocatedVoxelPtr, allocatedVoxel.Length);
                                reduction_sum_gpu_kernel(allocatedVoxelPtr, allocatedVoxel.Length, &calcVolume);
                            }
                        }
                    }

                    this.VolumeLabel.Text = calcVolume.ToString();
                }

                tracked = false;
            }
            else
            {
                Matrix4 newView = new Matrix4();

                // NOTE(review): newView is passed by value and never read afterwards,
                // so this call appears to have no observable effect here — confirm
                // whether RetreiveFrustumTransformMatrix4 has side effects of its own.
                sensor.ReconCamera.RetreiveFrustumTransformMatrix4(newView);

                // BUG FIX: the original "x = x--" is a no-op in C#; lower the weight
                // towards its floor while tracking fails.
                this.integrationWeight--;
                if (this.integrationWeight < 1)
                {
                    this.integrationWeight = 1;
                }

                // Resynchronize the sensor camera and its UI control with the
                // volume's current pose.
                sensor.ReconCamera.WorldToCameraMatrix4 = this.volume.GetCurrentWorldToCameraTransform();
                sensor.ReconSensorControl.CameraTransformMatrix = this.volume.GetCurrentWorldToCameraTransform();
            }
        }
// Example #6
        /// <summary>
        /// Process one depth frame: update the status bar, convert the raw depth to a
        /// depth float frame, then (under the volume lock, unless integration is paused)
        /// queue integration of the frame — with mapped color, via the tracking path, or
        /// depth-only — and trigger a reconstruction render on every second frame.
        /// </summary>
        /// <param name="sensor">The sensor whose depth frame is processed.</param>
        private void ReconstructDepthData(ReconstructionSensor sensor)
        {
            try
            {
                if (null != this.volume && !this.savingMesh)
                {
                    // Show the running processed-frame count on the UI thread.
                    this.Dispatcher.BeginInvoke(
                        (Action)(() =>
                            {
                                this.statusBarText.Text = Properties.Resources.ReconstructFrame
                                                          + this.processedFrameCount.ToString(CultureInfo.CurrentCulture);
                            }));

                    // Process and display depth data
                    this.PreProcessDepthData(sensor);

                    // We would do camera tracking here if required...
                   

                    // Lock the volume operations
                    lock (this.reconstructionLock)
                    {
                        // Integrate the frame to volume
                        if (!this.PauseIntegration)
                        {
                            // Map color to depth if we want to integrate color too
                            if (sensor.CaptureColor && null != sensor.MappedColorFrame)
                            {
                                // Pre-process color
                                sensor.MapColorToDepth();

                                // NOTE(review): the integration is queued to the UI thread and
                                // therefore executes AFTER this lock is released — confirm this
                                // is the intended synchronization with RenderReconstruction.
                                Dispatcher.BeginInvoke(
                                    (Action)
                                    (() =>
                                     this.volume.IntegrateFrame(
                                         sensor.DepthFloatFrame,
                                         sensor.MappedColorFrame,
                                         this.integrationWeight,
                                         FusionDepthProcessor.DefaultColorIntegrationOfAllAngles,
                                         sensor.ReconCamera.WorldToCameraMatrix4)));

                                // Flag that we have captured color
                                this.colorCaptured = true;

                            }
                            else if (this.tracking)
                            {
                                // On the first tracked frame, sample the depth at the image
                                // centre. The sampled value is currently unused because the
                                // axis-distance update below is commented out.
                                if (first)
                                {
                                    float centreDepth = this.depthFloatFrameDepthPixels[this.depthHeight * this.depthWidth / 2];
                                    if (centreDepth > 0.0)
                                    {
                                    //    Dispatcher.BeginInvoke(
                                    //(Action)
                                    //(() =>
                                    //    sensor.ReconSensorControl.AxisDistance = centreDepth
                                    //    ));
                                    }
                                    first = false;
                                }
                                // Run the align/integrate/measure path on the UI thread.
                                Dispatcher.BeginInvoke(
                                    (Action)
                                    (() =>
                                        doDepthStuff(sensor)
                                ));
                            }
                            else
                            {
                                // Not tracking: re-arm the first-frame flag for the next
                                // tracking session.
                                first = true;
                                // Just integrate depth
                                Dispatcher.BeginInvoke(
                                    (Action)
                                    (() =>
                                        this.volume.IntegrateFrame(
                                            sensor.DepthFloatFrame, this.integrationWeight, sensor.ReconCamera.WorldToCameraMatrix4)));

                            }
                        }
                    }
                    // Render the reconstruction only on every second processed frame
                    // to keep the UI responsive.
                    if (this.processedFrameCount % 2 == 0)
                    {
                        Dispatcher.BeginInvoke((Action)(() => this.RenderReconstruction(this.useCameraViewInReconstruction ? sensor : null)));
                    }
                }
            }
            catch (InvalidOperationException ex)
            {
                this.ShowStatusMessage(ex.Message);
            }
        }
// Example #7
        /// <summary>
        /// Just convert to float and draw the depth frame.
        /// </summary>
        /// <param name="sensor">The sensor where the depth frame originated.</param>
        private void PreProcessDepthData(ReconstructionSensor sensor)
        {
            if (null == sensor)
            {
                return;
            }

            // Convert the raw depth image to a depth float frame, applying the
            // sensor's depth clip range and mirroring settings.
            this.volume.DepthToDepthFloatFrame(
                sensor.DepthImagePixels,
                sensor.DepthFloatFrame,
                sensor.MinDepthClip,
                sensor.MaxDepthClip,
                sensor.MirrorDepth);

            // Marshal the display update onto the UI thread.
            Dispatcher.BeginInvoke((Action)(() => this.DepthFrameComplete(sensor)));
        }
// Example #8
        /// <summary>
        /// Add a sensor control tab to the tab collection.
        /// </summary>
        /// <param name="sensor">The sensor to add a control for.</param>
        /// <param name="isSupportNearMode">Indicate whether the sensor supports near mode.</param>
        private void AddSensorTabControl(ReconstructionSensor sensor, bool isSupportNearMode)
        {
            ItemCollection items = tabControl.Items;

            // Keep a direct reference to the new tab instead of re-fetching it
            // with an unchecked "as" cast (which could silently yield null).
            TabItem tab = new TabItem();
            items.Add(tab);
            int index = items.Count - 1;

            tab.Header = "Camera " + index.ToString(CultureInfo.CurrentCulture); // sensor.UniqueKinectId could also be used
            tab.Content = sensor.ReconSensorControl;

            if (!isSupportNearMode)
            {
                // Guard the cast: only disable the near-mode checkbox when the
                // content really is a ReconstructionSensorControl.
                var sensorControl = tab.Content as ReconstructionSensorControl;
                if (sensorControl != null)
                {
                    sensorControl.checkBoxNearMode.IsEnabled = false;
                }
            }

            // Select this added tab index
            tabControl.SelectedIndex = index;
        }
// Example #9
        /// <summary>
        /// Start color stream at specific resolution.
        /// </summary>
        /// <param name="sensor">The reconstruction sensor instance.</param>
        /// <param name="format">The resolution of image in color stream.</param>
        private void StartColorStream(ReconstructionSensor sensor, ColorImageFormat format)
        {
            if (null == sensor)
            {
                return;
            }

            try
            {
                // Register for frame notifications before starting the stream.
                sensor.ColorFrameReady += this.OnColorFrameReady;
                sensor.StartColorStream(format);
            }
            catch (IOException ex)
            {
                // Device is in use
                this.ShowStatusMessage(ex.Message);
            }
            catch (InvalidOperationException ex)
            {
                // Device is not valid, not supported or hardware feature unavailable
                this.ShowStatusMessage(ex.Message);
            }
        }
// Example #10
        /// <summary>
        /// Start depth stream at specific resolution.
        /// </summary>
        /// <param name="sensor">The reconstruction sensor instance.</param>
        /// <param name="format">The resolution of image in depth stream.</param>
        /// <returns>Returns true if the sensor supports near mode.</returns>
        private bool StartDepthStream(ReconstructionSensor sensor, DepthImageFormat format)
        {
            // Assume near mode support until the sensor reports otherwise.
            bool supportsNearMode = true;

            if (sensor == null)
            {
                return supportsNearMode;
            }

            try
            {
                // Register for frame notifications, then start the stream.
                sensor.DepthFrameReady += this.OnDepthFrameReady;
                supportsNearMode = sensor.StartDepthStream(format);
            }
            catch (IOException ex)
            {
                // Device is in use
                this.ShowStatusMessage(ex.Message);

                return supportsNearMode;
            }
            catch (InvalidOperationException ex)
            {
                // Device is not valid, not supported or hardware feature unavailable
                this.ShowStatusMessage(ex.Message);

                return supportsNearMode;
            }

            try
            {
                // Make sure Lasers are turned on
                sensor.Sensor.ForceInfraredEmitterOff = false;
            }
            catch (InvalidOperationException ex)
            {
                // Device is not valid, not supported or hardware feature unavailable;
                // show an error message just this once.
                this.ShowStatusMessage(ex.Message);
            }

            return supportsNearMode;
        }
// Example #11
        /// <summary>
        /// Un-subscribe from events and stop sensor streams.
        /// </summary>
        /// <param name="sensor">The sensor object to stop.</param>
        private void UnsubscribeAndStopSensor(ReconstructionSensor sensor)
        {
            if (sensor == null)
            {
                return;
            }

            // Detach every handler this window registered on the sensor so the
            // sensor no longer holds references back into this window.
            sensor.DepthFrameReady -= this.OnDepthFrameReady;
            sensor.ColorFrameReady -= this.OnColorFrameReady;
            sensor.RequireResetEvent -= this.ResetReconstruction;
            sensor.RequireRenderEvent -= this.OnKinectSensorTransformationChanged;
            sensor.SensorTransformationChanged -= this.OnKinectSensorTransformationChanged;
            sensor.AllSetCaptureColorEvent -= this.OnCaptureColorChanged;

            // Stop the streams once no handlers remain.
            sensor.StopDepthStream();
            sensor.StopColorStream();
        }