        unsafe void UpdateFusionFrame()
        {
            //Convert the Kinect depth data into Fusion's depth float frame
            //(the final argument mirrors the depth image)
            reconstruction.DepthToDepthFloatFrame(depthImagePixels, depthImageFrame,
                                                  FusionDepthProcessor.DefaultMinimumDepth, FusionDepthProcessor.DefaultMaximumDepth, true);

            //Smooth depthImageFrame and store the result in smoothDepthImageFrame
            reconstruction.SmoothDepthFloatFrame(depthImageFrame, smoothDepthImageFrame, SmoothingKernelWidth, SmoothingDistanceThreshold);

            //Compute each depth pixel's position in the color image so its color can be looked up
            ColorSpacePoint[] points = new ColorSpacePoint[depthWidth * depthHeight];
            coordinateMapper.MapDepthFrameToColorSpace(depthImagePixels, points);

            //Gather the color for every depth pixel into colorImageFrameBuffer
            int[] colorImageFrameBuffer = new int[depthWidth * depthHeight];
            fixed (byte* ptrColorImagePixels = colorImagePixels)
            {
                //Reinterpret the 4-bytes-per-pixel (BGRA) color buffer as 32-bit pixels
                int* rawColorPixels = (int*)ptrColorImagePixels;

                Parallel.For(0, depthHeight,
                             y =>
                {
                    for (int x = 0; x < depthWidth; x++)
                    {
                        int index             = y * depthWidth + x;
                        ColorSpacePoint point = points[index];
                        int colorX            = (int)(Math.Ceiling(point.X));
                        int colorY            = (int)(Math.Ceiling(point.Y));
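                        //MapDepthFrameToColorSpace yields negative infinity for depth pixels
                        //with no color mapping; the bounds check below filters those out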
                        if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                        {
                            colorImageFrameBuffer[index] = rawColorPixels[colorY * colorWidth + colorX];
                        }
                    }
                });
            }

            //Copy colorImageFrameBuffer into Fusion's colorImageFrame
            colorImageFrame.CopyPixelDataFrom(colorImageFrameBuffer);

            //Refresh worldToCameraTransform so the current camera position and orientation are used in the next calculation
            worldToCameraTransform = reconstruction.GetCurrentWorldToCameraTransform();

            //Feed the latest smoothDepthImageFrame and colorImageFrame into the Fusion
            //Reconstruction object (camera tracking and volume integration in one call)
            float alignmentEnergy = 0.0f;
            bool  ret             = reconstruction.ProcessFrame(smoothDepthImageFrame, colorImageFrame, FusionDepthProcessor.DefaultAlignIterationCount,
                                                                FusionDepthProcessor.DefaultIntegrationWeight, FusionDepthProcessor.DefaultColorIntegrationOfAllAngles,
                                                                out alignmentEnergy, worldToCameraTransform);
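            //alignmentEnergy reports the tracking residual (lower means a better fit);
            //ret is false when camera tracking failed for this frame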

            //If the frame could not be processed, count the error; once the counter reaches 100, reset and start over
            if (!ret)
            {
                if (++errorCount >= 100)
                {
                    errorCount = 0;
                    reset();
                }
            }
            //Store the result
            result();
        }
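
        //The reset() and result() helpers called above are not shown in this listing.
        //A minimal sketch of what they might look like, assuming the usual Kinect Fusion
        //pattern (pointCloudFrame and shadedSurfaceFrame are hypothetical fields):
        //
        //  private void reset()
        //  {
        //      //Restart integration from the identity camera pose
        //      worldToCameraTransform = Matrix4.Identity;
        //      reconstruction.ResetReconstruction(worldToCameraTransform);
        //  }
        //
        //  private void result()
        //  {
        //      //Raycast the volume from the current pose into a displayable color frame
        //      reconstruction.CalculatePointCloud(pointCloudFrame, shadedSurfaceFrame, worldToCameraTransform);
        //  }
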
        private void TrackCamera_AlignPointClouds(ref Matrix4 calculatedCameraPose)
        {
            //Downsample the depth frame to cut the cost of alignment, then smooth it
            //(kernel width 1, distance threshold 0.04m, scaled for the smaller frame)
            DownsampleDepthFrameNearestNeighbor(downsampledDepthFloatFrame, DownsampleFactor);
            volume.SmoothDepthFloatFrame(downsampledDepthFloatFrame, downsampledSmoothDepthFloatFrame, 1, 0.04f);
            //Convert the smoothed depth frame into an oriented point cloud
            FusionDepthProcessor.DepthFloatFrameToPointCloud(downsampledSmoothDepthFloatFrame, downsampledDepthPointCloudFrame);
            //Raycast the volume from the current pose estimate for the reference cloud,
            //remembering the pose so it can be restored if alignment fails
            volume.CalculatePointCloud(downsampledRaycastPointCloudFrame, calculatedCameraPose);
            Matrix4 initialPose = calculatedCameraPose;

            //Align the observed cloud to the reference; the refined pose is written back
            bool success = FusionDepthProcessor.AlignPointClouds(downsampledRaycastPointCloudFrame, downsampledDepthPointCloudFrame,
                                                                 FusionDepthProcessor.DefaultAlignIterationCount,
                                                                 downsampledDeltaFromReferenceFrameColorFrame, ref calculatedCameraPose);
            if (!success)
            {
                calculatedCameraPose = initialPose; //keep the previous pose when tracking fails
            }
            //Upsample the per-pixel alignment deltas back to full resolution for display
            UpsampleColorDeltasFrameNearestNeighbor(DownsampleFactor);
            //UpdateAlignDeltas(); //TODO: Necessary?
        }
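
        //A minimal usage sketch for the method above (an assumed calling pattern, not
        //code from the original sample):
        //
        //  Matrix4 pose = volume.GetCurrentWorldToCameraTransform();
        //  TrackCamera_AlignPointClouds(ref pose);
        //  //pose now holds the refined camera transform, or the previous one if alignment failed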