Example #1
        private void ProcessFrame(byte bodyIndex)
        {
            try
            {
                RemoveNonBodyPixels(bodyIndex);

                reconstruction.DepthToDepthFloatFrame(rawDepthData, floatDepthFrame,
                                                      MIN_DEPTH, MAX_DEPTH,
                                                      false);

                var aligned = reconstruction.ProcessFrame(
                    floatDepthFrame,
                    FusionDepthProcessor.DefaultAlignIterationCount,
                    FusionDepthProcessor.DefaultIntegrationWeight,
                    out alignmentEnergy,
                    worldToCameraTransform);
                if (aligned)
                {
                    syncContext.Post(_ => FrameAligned?.Invoke(this, EventArgs.Empty), null);
                    worldToCameraTransform = reconstruction.GetCurrentWorldToCameraTransform();
                }
            }
            catch (InvalidOperationException)
            {
                // Tracking can throw mid-stream; swallow the failure and wait for the next frame
            }

            try
            {
                reconstruction.CalculatePointCloud(pointCloudFrame, worldToCameraTransform);

                FusionDepthProcessor.ShadePointCloud(pointCloudFrame, worldToCameraTransform, surfaceFrame, null);
                SurfaceBitmap.Access(data => surfaceFrame.CopyPixelDataTo(data));

                syncContext.Post(_ => SurfaceBitmapUpdated?.Invoke(this, EventArgs.Empty), null);
            }
            catch (InvalidOperationException)
            {
                // Rendering can fail for a single frame; ignore and redraw on the next one
            }
        }
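
The handler above relies on several fields that are not shown. A minimal setup sketch, assuming the standard Kinect Fusion factory API; the volume resolution, processor choice, and method name are illustrative, not taken from the original:

        // Hedged sketch: one-time initialization for the fields ProcessFrame uses.
        // The 384^3 volume at 256 voxels/m and the Amp processor are illustrative.
        private void InitializeFusion(int width, int height)
        {
            var volumeParameters = new ReconstructionParameters(256, 384, 384, 384);

            reconstruction = Reconstruction.FusionCreateReconstruction(
                volumeParameters, ReconstructionProcessor.Amp, -1, Matrix4.Identity);

            floatDepthFrame = new FusionFloatImageFrame(width, height);
            pointCloudFrame = new FusionPointCloudImageFrame(width, height);
            surfaceFrame    = new FusionColorImageFrame(width, height);

            worldToCameraTransform = reconstruction.GetCurrentWorldToCameraTransform();
            syncContext = SynchronizationContext.Current;   // capture the UI context for the events
        }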
Example #2
        /// <summary>
        /// Upsample the color delta-from-reference frame with nearest-neighbor pixel replication.
        /// </summary>
        /// <param name="factor">The upsample factor (2, 4, 8, or 16; scales both x and y).</param>
        private unsafe void UpsampleColorDeltasFrameNearestNeighbor(int factor)
        {
            #region ErrorHandling
            if (downsampledDeltaFromReferenceFrameColorFrame == null
                || downsampledDeltaFromReferenceColorPixels == null
                || deltaFromReferenceFramePixelsArgb == null)
            {
                throw new ArgumentException("inputs null");
            }
            if (factor != 2 && factor != 4 && factor != 8 && factor != 16)
            {
                throw new ArgumentException("factor != 2, 4, 8 or 16");
            }
            int upsampleWidth  = downsampledWidth * factor;
            int upsampleHeight = downsampledHeight * factor;
            if (depthWidth != upsampleWidth || depthHeight != upsampleHeight)
            {
                throw new ArgumentException("upsampled image size != depth size");
            }
            #endregion
            downsampledDeltaFromReferenceFrameColorFrame.CopyPixelDataTo(downsampledDeltaFromReferenceColorPixels);

            // Here we make use of unsafe code to just copy the whole pixel as an int for performance reasons, as we do
            // not need access to the individual rgba components.
            fixed (int* rawColorPixelPtr = downsampledDeltaFromReferenceColorPixels)
            {
                // Copy to a plain local; a fixed local cannot be captured by the lambda below
                int* rawColorPixels = rawColorPixelPtr;

                // Note we run this only for the source image height pixels to sparsely fill the destination with rows
                Parallel.For(
                    0,
                    downsampledHeight,
                    y =>
                {
                    int destIndex        = y * upsampleWidth * factor;
                    int sourceColorIndex = y * downsampledWidth;

                    for (int x = 0; x < downsampledWidth; ++x, ++sourceColorIndex)
                    {
                        int color = rawColorPixels[sourceColorIndex];

                        // Replicate pixels horizontally; vertical duplication is
                        // handled below with a row-level block copy
                        for (int colFactorIndex = 0; colFactorIndex < factor; ++colFactorIndex, ++destIndex)
                        {
                            // Copy color pixel
                            deltaFromReferenceFramePixelsArgb[destIndex] = color;
                        }
                    }
                });
            }

            int sizeOfInt   = sizeof(int);
            int rowByteSize = upsampleWidth * sizeOfInt;   // one full destination row

            // Duplicate the remaining rows with memcpy
            for (int y = 0; y < downsampledHeight; ++y)
            {
                // iterate only for the smaller number of rows
                int srcRowIndex = upsampleWidth * factor * y;

                // Duplicate lines
                for (int r = 1; r < factor; ++r)
                {
                    int index = upsampleWidth * ((y * factor) + r);

                    System.Buffer.BlockCopy(
                        deltaFromReferenceFramePixelsArgb, srcRowIndex * sizeOfInt, deltaFromReferenceFramePixelsArgb, index * sizeOfInt, rowByteSize);
                }
            }
        }
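
A hypothetical call site for the method above, assuming the downsampled frame divides the depth frame evenly; deriving the factor from the widths is the only new code here:

            // Hypothetical call site: recover the factor from the two frame widths
            int factor = depthWidth / downsampledWidth;   // e.g. 512 / 128 == 4
            UpsampleColorDeltasFrameNearestNeighbor(factor);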
Example #3
        private void DrawFusionFrame()
        {
            // Copy the shaded surface into a managed buffer, then blit it to the UI bitmap
            // (the buffer could be cached to avoid a per-frame allocation)
            int[] surfaceImagePixels = new int[depthWidth * depthHeight];
            surfaceImageFrame.CopyPixelDataTo(surfaceImagePixels);
            drawBitmap.WritePixels(drawRect, surfaceImagePixels, drawStride, 0);
        }
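
The drawBitmap, drawRect, and drawStride fields sit outside this snippet. A plausible one-time setup, assuming a Bgr32 WriteableBitmap sized to the depth frame; the method name is illustrative:

        // Assumed one-time setup for the drawing fields used above (illustrative names)
        private void InitializeDrawTargets()
        {
            drawBitmap = new WriteableBitmap(depthWidth, depthHeight, 96.0, 96.0, PixelFormats.Bgr32, null);
            drawRect   = new Int32Rect(0, 0, depthWidth, depthHeight);
            drawStride = depthWidth * sizeof(int);   // Bgr32 = 4 bytes per pixel
        }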
Example #4
        private void ProcessDepthData(DepthImagePixel[] depthPixels)
        {
            try
            {
                if (processingSaveFile)
                {
                    return;
                }

                // Convert the DepthImagePixel data into a DepthFloatFrame
                FusionDepthProcessor.DepthToDepthFloatFrame(
                    depthPixels,
                    DepthWidth,
                    DepthHeight,
                    depthFloatBuffer,
                    FusionDepthProcessor.DefaultMinimumDepth,
                    FusionDepthProcessor.DefaultMaximumDepth,
                    false);

                // Process the frame
                bool trackingSucceeded = volume.ProcessFrame(
                    depthFloatBuffer,
                    FusionDepthProcessor.DefaultAlignIterationCount,
                    FusionDepthProcessor.DefaultIntegrationWeight,
                    volume.GetCurrentWorldToCameraTransform());
                if (!trackingSucceeded)
                {
                    // Reset after a run of consecutive tracking errors
                    // (e.g., the Kinect or the target was moved too quickly)
                    trackingErrorCount++;
                    if (trackingErrorCount >= 100)
                    {
                        Trace.WriteLine(@"tracking error.");

                        trackingErrorCount = 0;
                        volume.ResetReconstruction(Matrix4.Identity);
                    }

                    return;
                }

                // Retrieve the point cloud
                volume.CalculatePointCloud(
                    pointCloudBuffer,
                    volume.GetCurrentWorldToCameraTransform());

                // Render the point cloud into a 2D image
                FusionDepthProcessor.ShadePointCloud(
                    pointCloudBuffer,
                    volume.GetCurrentWorldToCameraTransform(),
                    shadedSurfaceColorFrame,
                    null);

                // Write the 2D image data out to a bitmap
                var colorPixels = new int[depthPixels.Length];
                shadedSurfaceColorFrame.CopyPixelDataTo(colorPixels);

                ImageKinectFusion.Source = BitmapSource.Create(DepthWidth, DepthHeight, 96, 96,
                                                               PixelFormats.Bgr32, null, colorPixels, DepthWidth * 4);
            }
            catch (Exception ex)
            {
                Trace.WriteLine(ex.Message);
            }
            finally
            {
                processingFrame = false;
            }
        }
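
The processingFrame flag implies the caller drops sensor frames while one is in flight. A sketch of such a handler against the Kinect v1 SDK; the depthPixels field and the event wiring are assumptions:

        // Hypothetical caller: skip sensor frames while ProcessDepthData is running
        private void Kinect_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
        {
            if (processingFrame)
            {
                return;
            }

            using (DepthImageFrame frame = e.OpenDepthImageFrame())
            {
                if (frame == null)
                {
                    return;
                }

                frame.CopyDepthImagePixelDataTo(depthPixels);
            }

            processingFrame = true;
            Dispatcher.BeginInvoke((Action)(() => ProcessDepthData(depthPixels)));
        }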
Example #5
        /// <summary>
        /// Processes the depth data package into the Kinect Fusion volume.
        /// </summary>
        /// <param name="pKdp">the data package</param>
        /// <param name="pCancelToken">token used to cancel the integration loop</param>
        void processDepthData(KinectDataPackage pKdp, System.Threading.CancellationToken pCancelToken)
        {
            lock (canWorkLock)
            {
                Log.LogManager.updateAlgorithmStatus("Kinect Fusion integration");
                this.volume.ResetReconstruction(Matrix4.Identity);

                int picturesIntegrated = 0;
                int frameWidth  = this.depthFloatFrame.Width;
                int frameHeight = this.depthFloatFrame.Height;

                // Create the Fusion image frames once; they wrap native resources
                // and should not be re-allocated for every depth frame
                FusionFloatImageFrame      depthFloatBuffer        = new FusionFloatImageFrame(frameWidth, frameHeight);
                FusionPointCloudImageFrame pointCloudBuffer        = new FusionPointCloudImageFrame(frameWidth, frameHeight);
                FusionColorImageFrame      shadedSurfaceColorFrame = new FusionColorImageFrame(frameWidth, frameHeight);
                int[] voxelPixels = new int[frameWidth * frameHeight];

                foreach (ushort[] pDepth in pKdp.rawDepthData)
                {
                    pCancelToken.ThrowIfCancellationRequested();

                    // The bitmap is per frame: it is frozen and handed off to the event consumers
                    WriteableBitmap bitmap = new WriteableBitmap(frameWidth, frameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);

                    this.volume.DepthToDepthFloatFrame(
                        pDepth,
                        depthFloatBuffer,
                        Config.ServerConfigManager._ServerConfigObject.serverKinectFusionConfig.minDepthClip,
                        Config.ServerConfigManager._ServerConfigObject.serverKinectFusionConfig.maxDepthClip,
                        false);
                    float alignmentValue;
                    bool  trackingSucceeded = this.volume.ProcessFrame(depthFloatBuffer,
                                                                       Config.ServerConfigManager._ServerConfigObject.serverKinectFusionConfig.iterationCount,
                                                                       Config.ServerConfigManager._ServerConfigObject.serverKinectFusionConfig.integrationWeight,
                                                                       out alignmentValue,
                                                                       volume.GetCurrentWorldToCameraTransform());

                    // If camera tracking failed, no data integration or raycast for reference
                    // point cloud will have taken place, and the internal camera pose
                    // will be unchanged.
                    if (!trackingSucceeded)
                    {
                        trackingErrorCount++;
                    }
                    else
                    {
                        Matrix4 calculatedCameraPose = volume.GetCurrentWorldToCameraTransform();

                        // Set the camera pose and reset tracking errors
                        worldToCameraTransform = calculatedCameraPose;
                        trackingErrorCount     = 0;
                    }

                    // Calculate the point cloud
                    volume.CalculatePointCloud(pointCloudBuffer, worldToCameraTransform);

                    // Shade the point cloud and render it. Note the third argument is the
                    // shaded surface frame and the fourth the optional normals frame.
                    FusionDepthProcessor.ShadePointCloud(
                        pointCloudBuffer,
                        worldToCameraTransform,
                        shadedSurfaceColorFrame,
                        null);

                    shadedSurfaceColorFrame.CopyPixelDataTo(voxelPixels);

                    bitmap.WritePixels(
                        new Int32Rect(0, 0, bitmap.PixelWidth, bitmap.PixelHeight),
                        voxelPixels,
                        bitmap.PixelWidth * sizeof(int),
                        0);

                    bitmap.Freeze();
                    OnNewFusionPictureEvent.BeginInvoke(pKdp.usedConfig.ID, bitmap, null, null);

                    picturesIntegrated++;
                    Log.LogManager.writeLogDebug("[DataIntegration:Reconstruction] " + picturesIntegrated + " of " + pKdp.rawDepthData.Count + " Pictures integrated");
                }

                // If the request was a calibration request, export the mesh and stop here
                if (pKdp.usedConfig.clientRequestObject.requestType == ClientConfigObject.RequestType.calibration)
                {
                    exportMesh(volume, pKdp, false);
                    Log.LogManager.writeLog("[DataIntegration:Reconstruction] Mesh of " + pKdp.usedConfig.name + " exported.");
                    return;
                }

                // Broadcast the new point cloud
                PointCloud p = new PointCloud(volume);
                p.ConfigObject = pKdp.usedConfig;
                OnNewPointCloudEvent.BeginInvoke(p, null, null);
                Log.LogManager.writeLog("[DataIntegration:Reconstruction] All pictures of " + pKdp.usedConfig.name + " integrated");
                Log.LogManager.updateAlgorithmStatus("Done");
            }
        }
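
exportMesh is referenced above but not shown. A minimal sketch of a Wavefront .obj export via Reconstruction.CalculateMesh; the path handling, culture formatting, and axis flip are illustrative assumptions, not the original method:

        // Hypothetical helper: dump the reconstruction volume as a Wavefront .obj.
        // CalculateMesh/GetVertices/GetTriangleIndexes are Kinect Fusion API;
        // everything else here is an assumption for illustration.
        private static void ExportMeshAsObj(Reconstruction volume, string path)
        {
            Mesh mesh = volume.CalculateMesh(1);   // voxel step 1 = full resolution

            using (var writer = new System.IO.StreamWriter(path))
            {
                var vertices = mesh.GetVertices();
                var indices  = mesh.GetTriangleIndexes();

                foreach (var v in vertices)
                {
                    // Negate Y and Z to leave the Kinect camera coordinate system
                    writer.WriteLine(string.Format(
                        System.Globalization.CultureInfo.InvariantCulture,
                        "v {0} {1} {2}", v.X, -v.Y, -v.Z));
                }

                for (int i = 0; i < indices.Count; i += 3)
                {
                    // .obj face indices are 1-based
                    writer.WriteLine("f {0} {1} {2}",
                        indices[i] + 1, indices[i + 1] + 1, indices[i + 2] + 1);
                }
            }
        }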