Example #1
        /// <summary>
        /// Process the depth input
        /// </summary>
        /// <param name="depthData">The depth data containing depth pixels and frame timestamp</param>
        private void ProcessDepthData(DepthData depthData)
        {
            try
            {
                if (null != this.volume && !this.savingMesh)
                {
                    // Ensure frame resources are ready
                    this.AllocateFrames();

                    // Check near mode
                    this.CheckNearMode();

                    // To enable playback of a .xed file through Kinect Studio and reset of the reconstruction
                    // if the .xed loops, we test for when the frame timestamp has skipped a large number.
                    // Note: this will potentially continually reset live reconstructions on slow machines which
                    // cannot process a live frame in less time than the reset threshold. Increase the number of
                    // milliseconds if this is a problem.
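                    // (A minimal sketch of this helper appears after this example.)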
                    this.CheckResetTimeStamp(depthData.FrameTimestamp);

                    // Convert depth frame to depth float frame
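                    // (output depths are in meters; pixels outside the min/max clip range are
                    // returned as zero and treated as invalid)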
                    FusionDepthProcessor.DepthToDepthFloatFrame(
                                            depthData.DepthImagePixels,
                                            this.width,
                                            this.height,
                                            this.depthFloatFrame,
                                            this.minDepthClip,
                                            this.maxDepthClip,
                                            this.MirrorDepth);

                    // Render depth float frame
                    RenderDepthFloatImage(this.depthFloatFrame, ref this.depthFloatFrameDepthPixels, ref this.depthFloatFramePixels, ref this.depthFloatFrameBitmap, this.depthFloatImage);

                    // Align new depth float image with reconstruction
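                    // alignmentEnergy receives a residual score for the alignment;
                    // lower magnitudes generally indicate a better fit to the reconstruction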
                    bool trackingSucceeded = this.volume.AlignDepthFloatToReconstruction(
                        this.depthFloatFrame,
                        FusionDepthProcessor.DefaultAlignIterationCount,
                        this.deltaFromReferenceFrame,
                        out this.alignmentEnergy,
                        this.worldToCameraTransform);

                    bool ifAddedCameraPose = false;

                    if (!trackingSucceeded)
                    {
                        this.trackingErrorCount++;

                        // Show tracking error on status bar
                        this.ShowStatusMessage(Properties.Resources.CameraTrackingFailed);
                    }
                    else
                    {
                        // Get updated camera transform from image alignment
                        Matrix4 calculatedCameraPos = this.volume.GetCurrentWorldToCameraTransform();

                        this.cameraPose.Add(calculatedCameraPos);
                        ifAddedCameraPose = true;

                        if (this.curColorData != null)
                        {
                            this.colorFrames.Add(this.curColorData);
                        }

                        // Render delta from reference frame
                        this.RenderAlignDeltasFloatImage(this.deltaFromReferenceFrame, ref this.deltaFromReferenceFrameBitmap, this.deltaFromReferenceImage);

                        // Clear track error count
                        this.trackingErrorCount = 0;

                        this.worldToCameraTransform = calculatedCameraPos;

                        // Integrate the frame to volume
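                        // (integrationWeight caps the per-voxel temporal averaging: larger values
                        // smooth noise over more frames but adapt more slowly to scene changes)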
                        if (!this.PauseIntegration)
                        {
                            this.volume.IntegrateFrame(this.depthFloatFrame, this.integrationWeight, this.worldToCameraTransform);
                        }
                    }

                    if (AutoResetReconstructionWhenLost && !trackingSucceeded && this.trackingErrorCount >= MaxTrackingErrors)
                    {
                        // Bad tracking
                        this.ShowStatusMessage(Properties.Resources.ResetVolumeAuto);

                        // Automatically Clear Volume and reset tracking if tracking fails
                        this.ResetReconstruction();
                    }

                    // Calculate the point cloud of integration
                    this.volume.CalculatePointCloud(this.pointCloudFrame, this.worldToCameraTransform);

                    // Add the point cloud for this pose to the list
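                    // Note: pointCloudFrame is the same reused instance on every call, so each
                    // list entry ends up referencing the buffer that holds only the latest data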
                    if (ifAddedCameraPose)
                    {
                        this.pointCloudFrames.Add(this.pointCloudFrame);
                    }

                    // Map X axis to blue channel, Y axis to green channel, and Z axis to red channel,
                    // normalizing each to the range [0, 1].
                    Matrix4 worldToBGRTransform = new Matrix4();
                    worldToBGRTransform.M11 = this.voxelsPerMeter / this.voxelsX;
                    worldToBGRTransform.M22 = this.voxelsPerMeter / this.voxelsY;
                    worldToBGRTransform.M33 = this.voxelsPerMeter / this.voxelsZ;
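                    // Translate X and Y by half the normalized extent so the volume center
                    // maps to mid-range color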
                    worldToBGRTransform.M41 = 0.5f;
                    worldToBGRTransform.M42 = 0.5f;
                    worldToBGRTransform.M44 = 1.0f;

                    // Shade point cloud frame for rendering
                    FusionDepthProcessor.ShadePointCloud(this.pointCloudFrame, this.worldToCameraTransform, worldToBGRTransform, this.shadedSurfaceFrame, this.shadedSurfaceNormalsFrame);

                    // Render shaded surface frame or shaded surface normals frame
                    RenderColorImage(this.displayNormals ? this.shadedSurfaceNormalsFrame : this.shadedSurfaceFrame, ref this.shadedSurfaceFramePixels, ref this.shadedSurfaceFrameBitmap, this.shadedSurfaceImage);

                    if (trackingSucceeded)
                    {
                        // Increase processed frame counter
                        this.processedFrameCount++;
                    }
                }
            }
            catch (InvalidOperationException ex)
            {
                this.ShowStatusMessage(ex.Message);
            }
            finally
            {
                this.processing = false;
            }
        }
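
The CheckResetTimeStamp helper called near the top of ProcessDepthData is not part of this listing. Based on the comment at the call site, a minimal sketch might look like the following; the lastFrameTimestamp field and the threshold constant are assumptions for illustration, not part of the original sample.

        /// <summary>
        /// Reset the reconstruction if the depth frame timestamp skips by a large amount,
        /// e.g. when a .xed recording loops in Kinect Studio
        /// </summary>
        /// <param name="frameTimestamp">The timestamp of the current depth frame</param>
        private void CheckResetTimeStamp(long frameTimestamp)
        {
            // Hypothetical threshold in milliseconds; increase it if a slow machine cannot
            // process a live frame this quickly, or live reconstructions may keep resetting
            const long ResetOnTimeStampSkippedMilliseconds = 1000;

            if (0 != this.lastFrameTimestamp)
            {
                long timestampDelta = Math.Abs(frameTimestamp - this.lastFrameTimestamp);

                // A large jump suggests the recording looped, so clear the volume and restart tracking
                if (timestampDelta > ResetOnTimeStampSkippedMilliseconds)
                {
                    this.ResetReconstruction();
                }
            }

            // Assumed long field holding the previous frame's timestamp
            this.lastFrameTimestamp = frameTimestamp;
        }
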
Example #2
        /// <summary>
        /// Event handler for Kinect sensor's DepthFrameReady event
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void OnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
        {
            // Open depth frame
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (null != depthFrame && !this.processing)
                {
                    DepthData depthData = new DepthData();

                    // Save frame timestamp
                    depthData.FrameTimestamp = depthFrame.Timestamp;

                    // Create local depth pixels buffer
                    depthData.DepthImagePixels = new DepthImagePixel[depthFrame.PixelDataLength];

                    // Copy depth pixels to local buffer
                    depthFrame.CopyDepthImagePixelDataTo(depthData.DepthImagePixels);

                    this.width = depthFrame.Width;
                    this.height = depthFrame.Height;

                    // Queue ProcessDepthData on the UI thread via the dispatcher at background priority
                    this.Dispatcher.BeginInvoke(
                                        DispatcherPriority.Background,
                                        (Action<DepthData>)((d) => { this.ProcessDepthData(d); }),
                                        depthData);

                    // Mark that a frame is pending; the flag is cleared in ProcessDepthData's
                    // finally block, so new frames are skipped until processing completes
                    this.processing = true;
                }
            }
        }
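
Both examples exchange frames through a DepthData object whose definition is not shown. A minimal sketch consistent with its use above, assuming only the two members the code actually touches:

        /// <summary>
        /// Container for one depth frame handed from the event handler to ProcessDepthData
        /// </summary>
        private class DepthData
        {
            /// <summary>
            /// Gets or sets the timestamp copied from the DepthImageFrame
            /// </summary>
            public long FrameTimestamp { get; set; }

            /// <summary>
            /// Gets or sets the raw depth pixels copied out of the frame
            /// </summary>
            public DepthImagePixel[] DepthImagePixels { get; set; }
        }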