unsafe void UpdateFusionFrame()
{
    // Create a Fusion depth float frame from the Kinect depth image data
    reconstruction.DepthToDepthFloatFrame(
        depthImagePixels,
        depthImageFrame,
        FusionDepthProcessor.DefaultMinimumDepth,
        FusionDepthProcessor.DefaultMaximumDepth,
        true);

    // Smooth depthImageFrame and store the result in smoothDepthImageFrame
    reconstruction.SmoothDepthFloatFrame(
        depthImageFrame, smoothDepthImageFrame, SmoothingKernelWidth, SmoothingDistanceThreshold);

    // Map each depth pixel to its position in the color image so its color can be looked up
    ColorSpacePoint[] points = new ColorSpacePoint[depthWidth * depthHeight];
    coordinateMapper.MapDepthFrameToColorSpace(depthImagePixels, points);

    // Store the color of each depth pixel in colorImageFrameBuffer
    int[] colorImageFrameBuffer = new int[depthWidth * depthHeight];
    fixed (byte* ptrColorImagePixels = colorImagePixels)
    {
        int* rawColorPixels = (int*)ptrColorImagePixels;

        Parallel.For(0, depthHeight, y =>
        {
            for (int x = 0; x < depthWidth; x++)
            {
                int index = y * depthWidth + x;
                ColorSpacePoint point = points[index];
                int colorX = (int)Math.Ceiling(point.X);
                int colorY = (int)Math.Ceiling(point.Y);

                // Invalid mappings come back as infinity/NaN, so the bounds check also filters those out
                if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                {
                    colorImageFrameBuffer[index] = rawColorPixels[colorY * colorWidth + colorX];
                }
            }
        });
    }

    // Create the Fusion colorImageFrame from colorImageFrameBuffer
    colorImageFrame.CopyPixelDataFrom(colorImageFrameBuffer);

    // Refresh worldToCameraTransform so the current camera position and orientation feed into the alignment
    worldToCameraTransform = reconstruction.GetCurrentWorldToCameraTransform();

    // Integrate the latest smoothDepthImageFrame and colorImageFrame into the Fusion Reconstruction object
    float alignmentEnergy = 0.0f;
    bool ret = reconstruction.ProcessFrame(
        smoothDepthImageFrame,
        colorImageFrame,
        FusionDepthProcessor.DefaultAlignIterationCount,
        FusionDepthProcessor.DefaultIntegrationWeight,
        FusionDepthProcessor.DefaultColorIntegrationOfAllAngles,
        out alignmentEnergy,
        worldToCameraTransform);

    // If processing failed (camera tracking lost), bump the error counter;
    // once it reaches 100, reset and start over from scratch
    if (!ret)
    {
        if (++errorCount >= 100)
        {
            errorCount = 0;
            reset();
        }
    }

    // Render/store the current result
    result();
}
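UpdateFusionFrame() leans on several members that are not shown: the `reconstruction` object, the Fusion frame buffers, the smoothing constants, and the `reset()`/`result()` helpers. The following is a minimal sketch of plausible definitions, modeled on the standard KinectFusion sample flow; the volume resolution, smoothing values, extra frame fields, and both helper bodies are assumptions, not code confirmed by the original.

// Sketch of the members UpdateFusionFrame() assumes (all values are assumptions).
const int SmoothingKernelWidth = 1;              // assumed sample default
const float SmoothingDistanceThreshold = 0.04f;  // assumed sample default: 4 cm

void InitializeFusion()
{
    // 384^3 voxels at 256 voxels per meter => a 1.5 m cube reconstruction volume
    var volumeParameters = new ReconstructionParameters(256, 384, 384, 384);
    worldToCameraTransform = Matrix4.Identity;
    reconstruction = ColorReconstruction.FusionCreateReconstruction(
        volumeParameters, ReconstructionProcessor.Amp, -1, worldToCameraTransform);

    depthImageFrame = new FusionFloatImageFrame(depthWidth, depthHeight);
    smoothDepthImageFrame = new FusionFloatImageFrame(depthWidth, depthHeight);
    colorImageFrame = new FusionColorImageFrame(depthWidth, depthHeight);
    pointCloudFrame = new FusionPointCloudImageFrame(depthWidth, depthHeight);
    shadedSurfaceFrame = new FusionColorImageFrame(depthWidth, depthHeight);
    shadedSurfaceNormalsFrame = new FusionColorImageFrame(depthWidth, depthHeight);
}

// Hypothetical reset(): drop the accumulated volume and restart from the identity pose.
void reset()
{
    worldToCameraTransform = Matrix4.Identity;
    reconstruction.ResetReconstruction(worldToCameraTransform);
}

// Hypothetical result(): ray-cast the volume from the current pose and shade it for display.
void result()
{
    reconstruction.CalculatePointCloud(pointCloudFrame, worldToCameraTransform);
    FusionDepthProcessor.ShadePointCloud(
        pointCloudFrame, worldToCameraTransform, shadedSurfaceFrame, shadedSurfaceNormalsFrame);
}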
/// <summary>
/// Process the input color image to make it equal in size to the depth image
/// </summary>
private unsafe FusionColorImageFrame ProcessColorForCameraPoseFinder(byte[] colorImagePixels)
{
    if (KinectSettings.DEPTH_WIDTH != RawDepthWidth || KinectSettings.DEPTH_HEIGHT != RawDepthHeight ||
        KinectSettings.COLOR_WIDTH != RawColorWidth || KinectSettings.COLOR_HEIGHT != RawColorHeight)
    {
        logger.Log(LogLevel.Error, "Cannot perform ProcessColorForCameraPoseFinder. Dimensions don't agree.");
        return new FusionColorImageFrame(0, 0);
    }

    // Allocate the output frame only after the dimension check, so the early-return
    // path above does not leak a disposable frame.
    var resampledColorFrame = new FusionColorImageFrame(KinectSettings.DEPTH_WIDTH, KinectSettings.DEPTH_HEIGHT);

    // The explicit cast guards against silent integer division if both constants are ints.
    float factor = (float)RawColorWidth / RawDepthHeightWithSpecialRatio;
    const int FilledZeroMargin = (RawDepthHeight - RawDepthHeightWithSpecialRatio) / 2;

    // Here we make use of unsafe code to just copy the whole pixel as an int for performance reasons,
    // as we do not need access to the individual rgba components.
    fixed (byte* ptrColorPixels = colorImagePixels)
    {
        int* rawColorPixels = (int*)ptrColorPixels;

        Parallel.For(
            FilledZeroMargin,
            KinectSettings.DEPTH_HEIGHT - FilledZeroMargin,
            y =>
            {
                int destIndex = y * KinectSettings.DEPTH_WIDTH;

                for (int x = 0; x < KinectSettings.DEPTH_WIDTH; ++x, ++destIndex)
                {
                    int srcX = (int)(x * factor);
                    int srcY = (int)(y * factor);
                    int sourceColorIndex = (srcY * KinectSettings.COLOR_WIDTH) + srcX;

                    this.resampledColorImagePixels[destIndex] = rawColorPixels[sourceColorIndex];
                }
            });
    }

    resampledColorFrame.CopyPixelDataFrom(this.resampledColorImagePixels);
    return resampledColorFrame;
}
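Because the method allocates the FusionColorImageFrame it returns, ownership passes to the caller, which must dispose it when done. Here is a minimal hypothetical caller sketch; the wrapper method name is an assumption, and the actual hand-off to the CameraPoseFinder is left as a comment since that code is not shown in the original.

// Hypothetical caller sketch: the returned frame is allocated inside
// ProcessColorForCameraPoseFinder, so the caller owns it and must dispose it.
void FindCameraPoseExample(byte[] colorImagePixels)
{
    using (FusionColorImageFrame resampledFrame = ProcessColorForCameraPoseFinder(colorImagePixels))
    {
        if (resampledFrame.Width == 0)
        {
            return; // dimension check failed; nothing usable was produced
        }

        // Hand the depth-sized resampledFrame to the CameraPoseFinder together with
        // the current depth float frame at this point.
    }
}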
/// <summary>
/// Clear the depth-aligned resampled color buffer and push the cleared pixels into the Fusion frame
/// </summary>
private void ResetColorImage()
{
    Array.Clear(resampledColorImagePixelsAlignedToDepth, 0, resampledColorImagePixelsAlignedToDepth.Length);
    resampledColorFrameDepthAligned.CopyPixelDataFrom(resampledColorImagePixelsAlignedToDepth);
}
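ResetColorImage() only clears the CPU-side color buffer and its Fusion frame; a full reset presumably also clears the reconstruction volume and restarts tracking. The sketch below shows how such a reset might chain the pieces together; the method name and the use of the `reconstruction` field (the one from UpdateFusionFrame() above) are assumptions modeled on the typical KinectFusion sample flow.

// Hypothetical full-reset sketch: restart camera tracking at the identity pose,
// clear the reconstruction volume, then clear the depth-aligned color frame.
private void ResetReconstruction()
{
    worldToCameraTransform = Matrix4.Identity;
    reconstruction.ResetReconstruction(worldToCameraTransform);
    ResetColorImage();
}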