        /// <summary>
        ///  Creates the reconstruction volume together with the floatDepthFrame, pointCloudFrame and
        ///  surfaceFrame buffers used by the ProcessFrame() function.
        /// </summary>
        /// <param name="sensor">The Kinect sensor providing depth frames.</param>
        public ReconstructionController(KinectSensor sensor)
        {
            Contract.Requires(sensor != null);

            this.syncContext = SynchronizationContext.Current;
            this.sensor      = sensor;

            var rparams = new ReconstructionParameters(128, 256, 256, 256);

            reconstruction              = Reconstruction.FusionCreateReconstruction(rparams, ReconstructionProcessor.Amp, -1, worldToCameraTransform);
            worldToVolumeTransform      = reconstruction.GetCurrentWorldToVolumeTransform();
            worldToVolumeTransform.M43 -= MIN_DEPTH * rparams.VoxelsPerMeter;
            reconstruction.ResetReconstruction(worldToCameraTransform, worldToVolumeTransform);

            var depthFrameDesc = sensor.DepthFrameSource.FrameDescription;

            var totalPixels = depthFrameDesc.Width * depthFrameDesc.Height;

            rawDepthData  = new ushort[totalPixels];
            bodyIndexData = new byte[totalPixels];
            SurfaceBitmap = new ThreadSafeBitmap(depthFrameDesc.Width, depthFrameDesc.Height);

            var intrinsics = sensor.CoordinateMapper.GetDepthCameraIntrinsics();
            var cparams    = new CameraParameters(
                intrinsics.FocalLengthX / depthFrameDesc.Width,
                intrinsics.FocalLengthY / depthFrameDesc.Height,
                intrinsics.PrincipalPointX / depthFrameDesc.Width,
                intrinsics.PrincipalPointY / depthFrameDesc.Height);

            floatDepthFrame = new FusionFloatImageFrame(depthFrameDesc.Width, depthFrameDesc.Height, cparams);
            pointCloudFrame = new FusionPointCloudImageFrame(depthFrameDesc.Width, depthFrameDesc.Height, cparams);
            surfaceFrame    = new FusionColorImageFrame(depthFrameDesc.Width, depthFrameDesc.Height, cparams);
        }
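        // The frames created above feed the per-frame pipeline. A minimal sketch of what the
        // ProcessFrame() mentioned in the summary might look like follows; MAX_DEPTH and the
        // depth-reader plumbing are assumptions, only MIN_DEPTH and the Fusion calls appear
        // elsewhere on this page.
        private void ProcessFrame()
        {
            // Convert the raw ushort depth into a clipped float depth frame
            reconstruction.DepthToDepthFloatFrame(rawDepthData, floatDepthFrame, MIN_DEPTH, MAX_DEPTH, false);

            // Align the new frame against the volume and integrate it on success
            float alignmentEnergy;
            bool tracked = reconstruction.ProcessFrame(
                floatDepthFrame,
                FusionDepthProcessor.DefaultAlignIterationCount,
                FusionDepthProcessor.DefaultIntegrationWeight,
                out alignmentEnergy,
                reconstruction.GetCurrentWorldToCameraTransform());

            if (tracked)
            {
                worldToCameraTransform = reconstruction.GetCurrentWorldToCameraTransform();
            }

            // Raycast the volume and shade it into the surface frame shown on screen
            reconstruction.CalculatePointCloud(pointCloudFrame, worldToCameraTransform);
            FusionDepthProcessor.ShadePointCloud(pointCloudFrame, worldToCameraTransform, null, surfaceFrame);
        }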
        private void SetupVariables()
        {
            mapper = sensor.CoordinateMapper;

            // Compute the buffer sizes used for the allocations below
            int depthImageSize = depthWidth * depthHeight;

            downsampledWidth  = depthWidth / DownsampleFactor;
            downsampledHeight = depthHeight / DownsampleFactor;
            int colorImageByteSize        = colorWidth * colorHeight * sizeof(int);
            int downsampledDepthImageSize = downsampledWidth * downsampledHeight;

            //Allocate frames
            resampledColorFrameDepthAligned = new FusionColorImageFrame(depthWidth, depthHeight);
            depthFloatFrame                              = new FusionFloatImageFrame(depthWidth, depthHeight);
            downsampledDepthFloatFrame                   = new FusionFloatImageFrame(downsampledWidth, downsampledHeight);
            downsampledSmoothDepthFloatFrame             = new FusionFloatImageFrame(downsampledWidth, downsampledHeight);
            downsampledDepthPointCloudFrame              = new FusionPointCloudImageFrame(downsampledWidth, downsampledHeight);
            downsampledRaycastPointCloudFrame            = new FusionPointCloudImageFrame(downsampledWidth, downsampledHeight);
            downsampledDeltaFromReferenceFrameColorFrame = new FusionColorImageFrame(downsampledWidth, downsampledHeight);

            //Allocate arrays
            depthImagePixels            = new ushort[depthImageSize];
            downsampledDepthImagePixels = new float[downsampledDepthImageSize];
            downsampledDeltaFromReferenceColorPixels = new int[downsampledDepthImageSize];
            deltaFromReferenceFramePixelsArgb        = new int[depthImageSize];
            colorCoordinates = new ColorSpacePoint[depthImageSize];
            resampledColorImagePixelsAlignedToDepth = new int[depthImageSize];
            depthVisibilityTestMapWidth             = colorWidth / ColorDownsampleFactor;
            depthVisibilityTestMapHeight            = colorHeight / ColorDownsampleFactor;
            depthVisibilityTestMap = new ushort[depthVisibilityTestMapWidth * depthVisibilityTestMapHeight];
            colorImagePixels       = new byte[colorImageByteSize];
        }
        protected virtual void Dispose(bool disposing)
        {
            if (!disposed)
            {
                if (depthFloatBuffer != null)
                {
                    depthFloatBuffer.Dispose();
                    depthFloatBuffer = null;
                }

                if (pointCloudBuffer != null)
                {
                    pointCloudBuffer.Dispose();
                    pointCloudBuffer = null;
                }

                if (shadedSurfaceColorFrame != null)
                {
                    shadedSurfaceColorFrame.Dispose();
                    shadedSurfaceColorFrame = null;
                }

                if (volume != null)
                {
                    volume.Dispose();
                    volume = null;
                }

                disposed = true;
            }
        }
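        // Dispose(bool) above is the virtual half of the standard IDisposable pattern.
        // A sketch of the public counterpart, assuming the class declares IDisposable:
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }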
Example #4
 public void RenderReconstruction(FusionColorImageFrame cf)
 {
     if (reconstructionRenderer != null)
     {
         reconstructionRenderer.Render(cf);
         LastRenderTimestamp = DateTime.UtcNow;
     }
 }
Example #5
        public FusionColorProcessor()
        {
            this.depthVisibilityTestMapWidth  = KinectSettings.COLOR_WIDTH / ColorDownsampleFactor;
            this.depthVisibilityTestMapHeight = KinectSettings.COLOR_HEIGHT / ColorDownsampleFactor;
            this.depthVisibilityTestMap       = new ushort[this.depthVisibilityTestMapWidth * this.depthVisibilityTestMapHeight];

            // Allocate the depth-color mapping points
            this.colorCoordinates                   = new ColorSpacePoint[KinectSettings.DEPTH_PIXEL_COUNT];
            ResampledColorFrameDepthAligned         = new FusionColorImageFrame(KinectSettings.DEPTH_WIDTH, KinectSettings.DEPTH_HEIGHT);
            resampledColorImagePixelsAlignedToDepth = new int[KinectSettings.DEPTH_PIXEL_COUNT];
        }
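The colorCoordinates array allocated above is typically refreshed once per frame through the coordinate mapper and then used to gather a color value for every depth pixel. A minimal sketch under assumed inputs (the mapper, rawDepthData and colorPixels parameters are not part of the original class):

        void MapColorToDepth(CoordinateMapper mapper, ushort[] rawDepthData, int[] colorPixels)
        {
            // One ColorSpacePoint per depth pixel
            mapper.MapDepthFrameToColorSpace(rawDepthData, this.colorCoordinates);

            for (int i = 0; i < this.colorCoordinates.Length; ++i)
            {
                ColorSpacePoint p = this.colorCoordinates[i];
                int cx = (int)Math.Floor(p.X + 0.5);
                int cy = (int)Math.Floor(p.Y + 0.5);

                bool inside = cx >= 0 && cx < KinectSettings.COLOR_WIDTH &&
                              cy >= 0 && cy < KinectSettings.COLOR_HEIGHT;

                // Write zero where the depth pixel has no color coverage
                this.resampledColorImagePixelsAlignedToDepth[i] =
                    inside ? colorPixels[(cy * KinectSettings.COLOR_WIDTH) + cx] : 0;
            }

            ResampledColorFrameDepthAligned.CopyPixelDataFrom(this.resampledColorImagePixelsAlignedToDepth);
        }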
Example #6
        public DeltaCalculator(Engine e)
        {
            this.engine = e;
            // Create float pixel array
            this.DeltaFromReferenceFrameFloatPixels = new float[KinectSettings.DEPTH_PIXEL_COUNT];

            // Create colored pixel array of correct format
            DeltaFromReferenceFramePixelsArgb = new int[KinectSettings.DEPTH_PIXEL_COUNT];

            //Downsampled Storage
            DownsampledDeltaFromReferenceFrameColorFrame = new FusionColorImageFrame(e.Resampler.DownsampledWidth, e.Resampler.DownsampledHeight);
            DownsampledDeltaFromReferenceColorPixels     = new int[e.Resampler.DownsampledWidth * e.Resampler.DownsampledHeight];
        }
        private void InitializeKinectFusion()
        {
            // Initialize Kinect Fusion
            var volParam = new ReconstructionParameters(VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ);

            volume = Reconstruction.FusionCreateReconstruction(volParam, ReconstructionProcessor.Amp, -1, Matrix4.Identity);

            // Create the conversion buffers
            depthFloatBuffer        = new FusionFloatImageFrame(DepthWidth, DepthHeight);
            pointCloudBuffer        = new FusionPointCloudImageFrame(DepthWidth, DepthHeight);
            shadedSurfaceColorFrame = new FusionColorImageFrame(DepthWidth, DepthHeight);

            // Reset the reconstruction
            volume.ResetReconstruction(Matrix4.Identity);
        }
Example #8
        /// <summary>
        /// Resamples the input color image so that it matches the depth image dimensions
        /// </summary>
        private unsafe FusionColorImageFrame ProcessColorForCameraPoseFinder(byte[] colorImagePixels)
        {
            if (KinectSettings.DEPTH_WIDTH != RawDepthWidth || KinectSettings.DEPTH_HEIGHT != RawDepthHeight ||
                KinectSettings.COLOR_WIDTH != RawColorWidth || KinectSettings.COLOR_HEIGHT != RawColorHeight)
            {
                logger.Log(LogLevel.Error, "Cannot perform ProcessColorForCameraPoseFinder. Dimensions don't agree.");
                return new FusionColorImageFrame(0, 0);
            }

            // Allocate the output frame only after the dimension check, so nothing leaks on the error path
            var resampledColorFrame = new FusionColorImageFrame(KinectSettings.DEPTH_WIDTH, KinectSettings.DEPTH_HEIGHT);

            float     factor           = (float)RawColorWidth / RawDepthHeightWithSpecialRatio;
            const int FilledZeroMargin = (RawDepthHeight - RawDepthHeightWithSpecialRatio) / 2;

            // Here we make use of unsafe code to just copy the whole pixel as an int for performance reasons, as we do
            // not need access to the individual rgba components.
            fixed(byte *ptrColorPixels = colorImagePixels)
            {
                int *rawColorPixels = (int *)ptrColorPixels;

                Parallel.For(
                    FilledZeroMargin,
                    KinectSettings.DEPTH_HEIGHT - FilledZeroMargin,
                    y =>
                {
                    int destIndex = y * KinectSettings.DEPTH_WIDTH;

                    for (int x = 0; x < KinectSettings.DEPTH_WIDTH; ++x, ++destIndex)
                    {
                        int srcX             = (int)(x * factor);
                        int srcY             = (int)(y * factor);
                        int sourceColorIndex = (srcY * KinectSettings.COLOR_WIDTH) + srcX;

                        this.resampledColorImagePixels[destIndex] = rawColorPixels[sourceColorIndex];
                    }
                });
            }

            resampledColorFrame.CopyPixelDataFrom(this.resampledColorImagePixels);
            return resampledColorFrame;
        }
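A typical consumer of this resampled frame is the camera pose finder, which stores depth/color key frames so the pose can be recovered after tracking loss. A rough sketch, assuming cameraPoseFinder, depthFloatFrame and a CameraPoseFinderDistanceThresholdAccept constant exist on the class:

        FusionColorImageFrame poseFinderColorFrame = ProcessColorForCameraPoseFinder(colorImagePixels);

        bool addedPose;
        bool historyTrimmed;
        cameraPoseFinder.ProcessFrame(
            depthFloatFrame,                          // current depth converted to float
            poseFinderColorFrame,                     // color resampled to depth resolution above
            worldToCameraTransform,                   // pose the frame was captured at
            CameraPoseFinderDistanceThresholdAccept,  // minimum distance before storing a new key pose
            out addedPose,
            out historyTrimmed);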
Example #9
        void InitializeFusion()
        {
            // Reconstruction Parameters
            float voxelPerMeter = 256;
            int   voxelsX       = 512;
            int   voxelsY       = 384;
            int   voxelsZ       = 512;

            reconstructionParameters = new ReconstructionParameters(voxelPerMeter, voxelsX, voxelsY, voxelsZ);

            // Set the initial camera pose to the world origin
            worldToCameraTransform = Matrix4.Identity;

            // Create the Fusion Reconstruction object
            reconstruction = ColorReconstruction.FusionCreateReconstruction(reconstructionParameters, ReconstructionProcessor.Amp, -1, worldToCameraTransform);

            // Create the Fusion image frames
            cameraParameters      = CameraParameters.Defaults;
            depthImageFrame       = new FusionFloatImageFrame(depthWidth, depthHeight, cameraParameters);
            smoothDepthImageFrame = new FusionFloatImageFrame(depthWidth, depthHeight, cameraParameters);
            colorImageFrame       = new FusionColorImageFrame(depthWidth, depthHeight, cameraParameters);
            pointCloudImageFrame  = new FusionPointCloudImageFrame(depthWidth, depthHeight, cameraParameters);
            surfaceImageFrame     = new FusionColorImageFrame(depthWidth, depthHeight, cameraParameters);
        }
Example #10
 public VolumeRenderer(Engine e)
 {
     this.engine               = e;
     ShadedSurfaceFrame        = new FusionColorImageFrame(KinectSettings.DEPTH_WIDTH, KinectSettings.DEPTH_HEIGHT);
     ShadedSurfaceNormalsFrame = new FusionColorImageFrame(KinectSettings.DEPTH_WIDTH, KinectSettings.DEPTH_HEIGHT);
 }
Example #11
        public void Evaluate(int SpreadMax)
        {
            this.VoxelResolutionX = this.FInVX[0];
            this.VoxelResolutionY = this.FInVY[0];
            this.VoxelResolutionZ = this.FInVZ[0];
            this.VoxelsPerMeter   = this.FInVPM[0];

            if (this.FTextureOutput[0] == null)
            {
                this.FTextureOutput[0] = new DX11Resource <DX11DynamicTexture2D>();
            }
            if (this.FPCOut[0] == null)
            {
                this.FPCOut[0] = new DX11Resource <IDX11ReadableStructureBuffer>();
            }
            if (this.FGeomOut[0] == null)
            {
                this.FGeomOut[0] = new DX11Resource <DX11IndexedGeometry>();
            }

            if (this.FOutVoxels[0] == null)
            {
                this.FOutVoxels[0] = new DX11Resource <IDX11ReadableStructureBuffer>();
            }

            if (this.FInExport[0])
            {
                this.FGeomOut[0].Dispose(); this.FGeomOut[0] = new DX11Resource <DX11IndexedGeometry>();
            }

            if (this.FInvalidateConnect)
            {
                this.FInvalidateConnect = false;

                if (this.FInRuntime.PluginIO.IsConnected)
                {
                    this.runtime = this.FInRuntime[0];
                    this.runtime.DepthFrameReady += this.runtime_DepthFrameReady;

                    // Depth frames generated from the depth input
                    this.depthFloatBuffer = new FusionFloatImageFrame(width, height);

                    // Point cloud frames generated from the depth float input
                    this.pointCloudBuffer = new FusionPointCloudImageFrame(width, height);

                    // Create images to raycast the Reconstruction Volume
                    this.shadedSurfaceColorFrame = new FusionColorImageFrame(width, height);
                }
            }

            if (this.FInVPM.IsChanged || this.FInVX.IsChanged || this.FInVY.IsChanged || this.FInVZ.IsChanged)
            {
                if (this.volume != null)
                {
                    this.volume.Dispose();
                }

                var volParam = new ReconstructionParameters(VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ);
                this.worldToCameraTransform = Matrix4.Identity;

                this.volume = Reconstruction.FusionCreateReconstruction(volParam, ProcessorType, 0, this.worldToCameraTransform);

                this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

                this.ResetReconstruction();
            }

            if (this.runtime != null)
            {
                bool needreset = this.FInReset[0];

                if (needreset)
                {
                    this.ResetReconstruction();
                }
            }
        }
Example #12
        public VolumeBuilder(Scanner source, Dispatcher dispatcher)
        {
            if (source == null)
            {
                throw new ArgumentNullException("source");
            }

            this.source     = source;
            this.dispatcher = dispatcher;

            // Set the world-view transform to identity, so the world origin is the initial camera location.
            this.worldToCameraTransform = Matrix4.Identity;

            // Map world X axis to blue channel, Y axis to green channel and Z axis to red channel,
            // normalizing each to the range [0, 1]. We also add a shift of 0.5 to both X,Y channels
            // as the world origin starts located at the center of the front face of the volume,
            // hence we need to map negative x,y world vertex locations to positive color values.
            this.worldToBGRTransform     = Matrix4.Identity;
            this.worldToBGRTransform.M11 = settings.VoxelsPerMeter / settings.VoxelsX;
            this.worldToBGRTransform.M22 = settings.VoxelsPerMeter / settings.VoxelsY;
            this.worldToBGRTransform.M33 = settings.VoxelsPerMeter / settings.VoxelsZ;
            this.worldToBGRTransform.M41 = 0.5f;
            this.worldToBGRTransform.M42 = 0.5f;
            this.worldToBGRTransform.M44 = 1.0f;

            var volumeParameters = new ReconstructionParameters(settings.VoxelsPerMeter, settings.VoxelsX, settings.VoxelsY, settings.VoxelsZ);

            this.volume = ColorReconstruction.FusionCreateReconstruction(volumeParameters, ReconstructionProcessor.Amp, -1, this.worldToCameraTransform);

            var depthWidth  = this.source.Frame.DepthWidth;
            var depthHeight = this.source.Frame.DepthHeight;
            var depthSize   = depthWidth * depthHeight;

            this.depthFloatFrame                 = new FusionFloatImageFrame(depthWidth, depthHeight);
            this.smoothDepthFloatFrame           = new FusionFloatImageFrame(depthWidth, depthHeight);
            this.resampledColorFrame             = new FusionColorImageFrame(depthWidth, depthHeight);
            this.resampledColorFrameDepthAligned = new FusionColorImageFrame(depthWidth, depthHeight);
            this.deltaFromReferenceFrame         = new FusionFloatImageFrame(depthWidth, depthHeight);
            this.shadedSurfaceFrame              = new FusionColorImageFrame(depthWidth, depthHeight);
            this.raycastPointCloudFrame          = new FusionPointCloudImageFrame(depthWidth, depthHeight);
            this.depthPointCloudFrame            = new FusionPointCloudImageFrame(depthWidth, depthHeight);

            var downsampledDepthWidth  = depthWidth / settings.DownsampleFactor;
            var downsampledDepthHeight = depthHeight / settings.DownsampleFactor;
            var downsampledDepthSize   = downsampledDepthWidth * downsampledDepthHeight;

            this.downsampledDepthFloatFrame                   = new FusionFloatImageFrame(downsampledDepthWidth, downsampledDepthHeight);
            this.downsampledSmoothDepthFloatFrame             = new FusionFloatImageFrame(downsampledDepthWidth, downsampledDepthHeight);
            this.downsampledRaycastPointCloudFrame            = new FusionPointCloudImageFrame(downsampledDepthWidth, downsampledDepthHeight);
            this.downsampledDepthPointCloudFrame              = new FusionPointCloudImageFrame(downsampledDepthWidth, downsampledDepthHeight);
            this.downsampledDeltaFromReferenceFrameColorFrame = new FusionColorImageFrame(downsampledDepthWidth, downsampledDepthHeight);

            this.resampledColorData   = new int[depthSize];
            this.downsampledDepthData = new float[downsampledDepthSize];
            this.downsampledDeltaFromReferenceColorPixels = new int[downsampledDepthSize];
            this.deltaFromReferenceFramePixelsArgb        = new int[depthSize];
            this.shadedSurfaceFramePixelsArgb             = new int[this.shadedSurfaceFrame.PixelDataLength];

            this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

            this.volumeBitmap = new WriteableBitmap(depthWidth, depthHeight, settings.DefaultSystemDPI, settings.DefaultSystemDPI, PixelFormats.Bgr32, null);

            // Create a camera pose finder with default parameters
            this.cameraPoseFinder = CameraPoseFinder.FusionCreateCameraPoseFinder(CameraPoseFinderParameters.Defaults);

            this.workerThread = new Thread(WorkerThreadProc);
            this.workerThread.Start();
            this.source.Frame.OnDataUpdate += OnFrameDataUpdate;
        }
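The downsampled frames allocated above exist so that camera tracking can run at reduced resolution. A sketch of that step, assuming SmoothingKernelWidth and SmoothingDistanceThreshold constants and that downsampledDepthFloatFrame has already been filled from the raw depth:

        private bool TrackCameraAlignPointClouds()
        {
            // Smooth the downsampled depth to stabilise the alignment
            this.volume.SmoothDepthFloatFrame(
                this.downsampledDepthFloatFrame,
                this.downsampledSmoothDepthFloatFrame,
                SmoothingKernelWidth,
                SmoothingDistanceThreshold);

            // Observed point cloud from the smoothed depth, reference point cloud raycast from the volume
            FusionDepthProcessor.DepthFloatFrameToPointCloud(
                this.downsampledSmoothDepthFloatFrame, this.downsampledDepthPointCloudFrame);
            this.volume.CalculatePointCloud(
                this.downsampledRaycastPointCloudFrame, this.worldToCameraTransform);

            // Align the two clouds; on success the ref parameter holds the updated camera pose
            return FusionDepthProcessor.AlignPointClouds(
                this.downsampledRaycastPointCloudFrame,
                this.downsampledDepthPointCloudFrame,
                FusionDepthProcessor.DefaultAlignIterationCount,
                this.downsampledDeltaFromReferenceFrameColorFrame,
                ref this.worldToCameraTransform);
        }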
Example #13
        private void InitFusion()
        {
            if (_isFusionInitialized)
            {
                return;
            }

            _isFusionInitialized = true;

            var depthFormat = KinectSensor.DepthStream.Format;
            var depthSize   = FormatHelper.GetDepthSize(depthFormat);

            _fusionWorkItemPool = new Pool <FusionWorkItem, DepthImageFormat>(5, depthFormat, FusionWorkItem.Create);

            _fusionWorkQueue = new WorkQueue <FusionWorkItem>(ProcessFusionFrameBackground)
            {
                CanceledCallback = ReturnFusionWorkItem,
                MaxQueueLength   = 2
            };

            this.frameDataLength = KinectSensor.DepthStream.FramePixelDataLength;

            // Allocate space to put the color pixels we'll create
            this.colorPixels = new int[(int)(depthSize.Width * 2 * depthSize.Height * 2)];

            // This is the bitmap we'll display on-screen
            this.colorFusionBitmap = new WriteableBitmap(
                (int)depthSize.Width * 2,
                (int)depthSize.Height * 2,
                96.0,
                96.0,
                PixelFormats.Bgr32,
                null);
            FusionOutputImage = colorFusionBitmap;


            var volParam = new ReconstructionParameters(VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ);

            // Set the world-view transform to identity, so the world origin is the initial camera location.
            this.worldToCameraTransform = Matrix4.Identity;

            try
            {
                // This creates a volume cube with the Kinect at center of near plane, and volume directly
                // in front of Kinect.
                this.volume = Reconstruction.FusionCreateReconstruction(volParam, ProcessorType, DeviceToUse, this.worldToCameraTransform);

                this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

                if (this.translateResetPose)
                {
                    this.ResetReconstruction(_currentVolumeCenter);
                }
            }
            catch (ArgumentException)
            {
                FusionStatusMessage = "ArgumentException - DX11 GPU not found?";
                return;
            }
            catch (InvalidOperationException ex)
            {
                FusionStatusMessage = ex.Message;
                return;
            }
            catch (DllNotFoundException)
            {
                FusionStatusMessage = Properties.Resources.MissingPrerequisite;
                return;
            }

            // Depth frames generated from the depth input
            this.depthFloatBuffer    = new FusionFloatImageFrame((int)depthSize.Width, (int)depthSize.Height);
            this.residualFloatBuffer = new FusionFloatImageFrame((int)depthSize.Width, (int)depthSize.Height);
            _residualData            = new float[(int)(depthSize.Width * depthSize.Height)];

            // Point cloud frames generated from the depth float input
            this.pointCloudBuffer = new FusionPointCloudImageFrame((int)depthSize.Width * 2, (int)depthSize.Height * 2);

            // Create images to raycast the Reconstruction Volume
            this.shadedSurfaceColorFrame = new FusionColorImageFrame((int)depthSize.Width * 2, (int)depthSize.Height * 2);

            // Reset the reconstruction
            this.ResetReconstruction(_currentVolumeCenter);

            _audioManager.Start();
        }
Example #14
        /// <summary>
        /// Execute startup tasks
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            // Look through all sensors and start the first connected one.
            // This requires that a Kinect is connected at the time of app startup.
            // To make your app robust against plug/unplug,
            // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    this.sensor = potentialSensor;
                    break;
                }
            }

            if (null == this.sensor)
            {
                this.statusBarText.Text = Properties.Resources.NoKinectReady;
                return;
            }

            // Turn on the depth stream to receive depth frames
            this.sensor.DepthStream.Enable(DepthImageResolution);

            this.frameDataLength = this.sensor.DepthStream.FramePixelDataLength;

            // Allocate space to put the color pixels we'll create
            this.colorPixels = new int[this.frameDataLength];

            // This is the bitmap we'll display on-screen
            this.colorBitmap = new WriteableBitmap(
                (int)ImageSize.Width,
                (int)ImageSize.Height,
                96.0,
                96.0,
                PixelFormats.Bgr32,
                null);

            // Set the image we display to point to the bitmap where we'll put the image data
            this.Image.Source = this.colorBitmap;

            // Add an event handler to be called whenever there is new depth frame data
            this.sensor.DepthFrameReady += this.SensorDepthFrameReady;

            var volParam = new ReconstructionParameters(VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ);

            // Set the world-view transform to identity, so the world origin is the initial camera location.
            this.worldToCameraTransform = Matrix4.Identity;

            try
            {
                // This creates a volume cube with the Kinect at center of near plane, and volume directly
                // in front of Kinect.
                this.volume = Reconstruction.FusionCreateReconstruction(volParam, ProcessorType, DeviceToUse, this.worldToCameraTransform);

                this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

                if (this.translateResetPoseByMinDepthThreshold)
                {
                    this.ResetReconstruction();
                }
            }
            catch (InvalidOperationException ex)
            {
                this.statusBarText.Text = ex.Message;
                return;
            }
            catch (DllNotFoundException)
            {
                this.statusBarText.Text = Properties.Resources.MissingPrerequisite;
                return;
            }

            // Depth frames generated from the depth input
            this.depthFloatBuffer = new FusionFloatImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

            // Point cloud frames generated from the depth float input
            this.pointCloudBuffer = new FusionPointCloudImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

            // Create images to raycast the Reconstruction Volume
            this.shadedSurfaceColorFrame = new FusionColorImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

            // Start the sensor!
            try
            {
                this.sensor.Start();
            }
            catch (IOException ex)
            {
                // Device is in use
                this.sensor             = null;
                this.statusBarText.Text = ex.Message;

                return;
            }
            catch (InvalidOperationException ex)
            {
                // Device is not valid, not supported or hardware feature unavailable
                this.sensor             = null;
                this.statusBarText.Text = ex.Message;

                return;
            }

            // Set Near Mode by default
            try
            {
                this.sensor.DepthStream.Range = DepthRange.Near;
                checkBoxNearMode.IsChecked    = true;
            }
            catch (InvalidOperationException)
            {
                // Device is not near mode capable
            }

            // Initialize and start the FPS timer
            this.fpsTimer          = new DispatcherTimer();
            this.fpsTimer.Tick    += new EventHandler(this.FpsTimerTick);
            this.fpsTimer.Interval = new TimeSpan(0, 0, FpsInterval);

            this.fpsTimer.Start();

            // Reset the reconstruction
            this.ResetReconstruction();
        }
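The ResetReconstruction() called above is not shown in this snippet; a sketch consistent with the world-to-volume shift used in the first example on this page, assuming a MinDepthClip constant:

        private void ResetReconstruction()
        {
            // Reset the camera pose to the world origin
            this.worldToCameraTransform = Matrix4.Identity;

            if (this.translateResetPoseByMinDepthThreshold)
            {
                // Shift the volume away from the camera by the minimum depth threshold
                Matrix4 worldToVolumeTransform = this.defaultWorldToVolumeTransform;
                worldToVolumeTransform.M43 -= MinDepthClip * VoxelsPerMeter;
                this.volume.ResetReconstruction(this.worldToCameraTransform, worldToVolumeTransform);
            }
            else
            {
                this.volume.ResetReconstruction(this.worldToCameraTransform);
            }
        }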
Example #15
        /// <summary>
        /// Execute startup tasks
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            // Check to ensure suitable DirectX11 compatible hardware exists before initializing Kinect Fusion
            try
            {
                string deviceDescription  = string.Empty;
                string deviceInstancePath = string.Empty;
                int    deviceMemory       = 0;

                FusionDepthProcessor.GetDeviceInfo(
                    ProcessorType, DeviceToUse, out deviceDescription, out deviceInstancePath, out deviceMemory);
            }
            catch (IndexOutOfRangeException)
            {
                // Thrown when index is out of range for processor type or there is no DirectX11 capable device installed.
                // As we set -1 (auto-select default) for the DeviceToUse above, this indicates that there is no DirectX11
                // capable device. The options for users in this case are to either install a DirectX11 capable device
                // (see documentation for recommended GPUs) or to switch to non-real-time CPU based reconstruction by
                // changing ProcessorType to ReconstructionProcessor.Cpu
                this.statusBarText.Text = Properties.Resources.NoDirectX11CompatibleDeviceOrInvalidDeviceIndex;
                return;
            }
            catch (DllNotFoundException)
            {
                this.statusBarText.Text = Properties.Resources.MissingPrerequisite;
                return;
            }
            catch (InvalidOperationException ex)
            {
                this.statusBarText.Text = ex.Message;
                return;
            }

            // Look through all sensors and start the first connected one.
            // This requires that a Kinect is connected at the time of app startup.
            // To make your app robust against plug/unplug,
            // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    this.sensor = potentialSensor;
                    break;
                }
            }

            if (null == this.sensor)
            {
                this.statusBarText.Text = Properties.Resources.NoKinectReady;
                return;
            }

            // Turn on the depth stream to receive depth frames
            this.sensor.DepthStream.Enable(DepthImageResolution);

            this.frameDataLength = this.sensor.DepthStream.FramePixelDataLength;

            // Create local depth pixels buffer
            this.depthImagePixels = new DepthImagePixel[this.frameDataLength];

            // Allocate space to put the color pixels we'll create
            this.colorPixels = new int[this.frameDataLength];

            // This is the bitmap we'll display on-screen
            this.colorBitmap = new WriteableBitmap(
                (int)ImageSize.Width,
                (int)ImageSize.Height,
                96.0,
                96.0,
                PixelFormats.Bgr32,
                null);

            // Set the image we display to point to the bitmap where we'll put the image data
            this.Image.Source = this.colorBitmap;

            // Add an event handler to be called whenever there is new depth frame data
            this.sensor.DepthFrameReady += this.SensorDepthFrameReady;

            var volParam = new ReconstructionParameters(VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ);

            // Set the world-view transform to identity, so the world origin is the initial camera location.
            this.worldToCameraTransform = Matrix4.Identity;

            try
            {
                // This creates a volume cube with the Kinect at center of near plane, and volume directly
                // in front of Kinect.
                this.volume = Reconstruction.FusionCreateReconstruction(volParam, ProcessorType, DeviceToUse, this.worldToCameraTransform);

                this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

                if (this.translateResetPoseByMinDepthThreshold)
                {
                    // Reset the reconstruction if we need to add a custom world-volume transformation
                    this.ResetReconstruction();
                }
            }
            catch (InvalidOperationException ex)
            {
                this.statusBarText.Text = ex.Message;
                return;
            }
            catch (DllNotFoundException)
            {
                this.statusBarText.Text = Properties.Resources.MissingPrerequisite;
                return;
            }

            // Depth frames generated from the depth input
            this.depthFloatBuffer = new FusionFloatImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

            // Point cloud frames generated from the depth float input
            this.pointCloudBuffer = new FusionPointCloudImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

            // Create images to raycast the Reconstruction Volume
            this.shadedSurfaceColorFrame = new FusionColorImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

            // Start the sensor!
            try
            {
                this.sensor.Start();
            }
            catch (IOException ex)
            {
                // Device is in use
                this.sensor             = null;
                this.statusBarText.Text = ex.Message;

                return;
            }
            catch (InvalidOperationException ex)
            {
                // Device is not valid, not supported or hardware feature unavailable
                this.sensor             = null;
                this.statusBarText.Text = ex.Message;

                return;
            }

            // Set Near Mode by default
            try
            {
                this.sensor.DepthStream.Range   = DepthRange.Near;
                this.checkBoxNearMode.IsChecked = true;
            }
            catch (InvalidOperationException)
            {
                // Near mode not supported on device, silently fail during initialization
                this.checkBoxNearMode.IsEnabled = false;
            }

            // Initialize and start the FPS timer
            this.fpsTimer          = new DispatcherTimer();
            this.fpsTimer.Tick    += new EventHandler(this.FpsTimerTick);
            this.fpsTimer.Interval = new TimeSpan(0, 0, FpsInterval);

            this.fpsTimer.Start();

            this.lastFPSTimestamp = DateTime.UtcNow;
        }
Example #16
        void InitializeFusion()
        {
            // Reconstruction Parameters
            float voxelPerMeter = 256;
            int voxelsX = 512;
            int voxelsY = 384;
            int voxelsZ = 512;
            reconstructionParameters = new ReconstructionParameters( voxelPerMeter, voxelsX, voxelsY, voxelsZ );

            // Set the initial camera pose to the world origin
            worldToCameraTransform = Matrix4.Identity;

            // Create the Fusion Reconstruction object
            reconstruction = ColorReconstruction.FusionCreateReconstruction( reconstructionParameters, ReconstructionProcessor.Amp, -1, worldToCameraTransform );

            // Create the Fusion image frames
            cameraParameters = CameraParameters.Defaults;
            depthImageFrame = new FusionFloatImageFrame( depthWidth, depthHeight, cameraParameters );
            smoothDepthImageFrame = new FusionFloatImageFrame( depthWidth, depthHeight, cameraParameters );
            colorImageFrame = new FusionColorImageFrame( depthWidth, depthHeight, cameraParameters );
            pointCloudImageFrame = new FusionPointCloudImageFrame( depthWidth, depthHeight, cameraParameters );
            surfaceImageFrame = new FusionColorImageFrame( depthWidth, depthHeight, cameraParameters );
        }
Example #17
        public void Evaluate(int SpreadMax)
        {
            this.VoxelResolutionX = this.FInVX[0];
            this.VoxelResolutionY = this.FInVY[0];
            this.VoxelResolutionZ = this.FInVZ[0];
            this.VoxelsPerMeter = this.FInVPM[0];

            if (this.FTextureOutput[0] == null) { this.FTextureOutput[0] = new DX11Resource<DX11DynamicTexture2D>(); }
            if (this.FPCOut[0] == null) { this.FPCOut[0] = new DX11Resource<IDX11ReadableStructureBuffer>(); }
            if (this.FGeomOut[0] == null) { this.FGeomOut[0] = new DX11Resource<DX11IndexedGeometry>(); }

            if (this.FOutVoxels[0] == null) { this.FOutVoxels[0] = new DX11Resource<IDX11ReadableStructureBuffer>(); }

            if (this.FInExport[0]) { this.FGeomOut[0].Dispose(); this.FGeomOut[0] = new DX11Resource<DX11IndexedGeometry>(); }

            if (this.FInvalidateConnect)
            {
                this.FInvalidateConnect = false;

                if (this.FInRuntime.PluginIO.IsConnected)
                {
                    this.runtime = this.FInRuntime[0];
                    this.runtime.DepthFrameReady += this.runtime_DepthFrameReady;

                    // Depth frames generated from the depth input
                    this.depthFloatBuffer = new FusionFloatImageFrame(width, height);

                    // Point cloud frames generated from the depth float input
                    this.pointCloudBuffer = new FusionPointCloudImageFrame(width, height);

                    // Create images to raycast the Reconstruction Volume
                    this.shadedSurfaceColorFrame = new FusionColorImageFrame(width, height);

                }
            }

            if (this.FInVPM.IsChanged || this.FInVX.IsChanged || this.FInVY.IsChanged || this.FInVZ.IsChanged)
            {
                if (this.volume != null)
                {
                    this.volume.Dispose();
                }

                var volParam = new ReconstructionParameters(VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ);
                this.worldToCameraTransform = Matrix4.Identity;

                this.volume = Reconstruction.FusionCreateReconstruction(volParam, ProcessorType, 0, this.worldToCameraTransform);

                this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

                this.ResetReconstruction();
            }

            if (this.runtime != null)
            {
                bool needreset = this.FInReset[0];

                if (needreset) { this.ResetReconstruction(); }
            }
        }
Example #18
        /// <summary>
        /// Processes the depth data package into the Kinect Fusion volume.
        /// </summary>
        /// <param name="pKdp">the data package</param>
        /// <param name="pCancelToken">token used to cancel the integration</param>
        void processDepthData(KinectDataPackage pKdp, System.Threading.CancellationToken pCancelToken)
        {
            lock (canWorkLock)
            {
                Log.LogManager.updateAlgorithmStatus("Kinect Fusion integration");
                this.volume.ResetReconstruction(Matrix4.Identity);

                int picturesIntegrated = 0;
                foreach (ushort[] pDepth in pKdp.rawDepthData)
                {
                    pCancelToken.ThrowIfCancellationRequested();
                    WriteableBitmap            bitmap                  = new WriteableBitmap(this.depthFloatFrame.Width, this.depthFloatFrame.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
                    FusionFloatImageFrame      depthFloatBuffer        = new FusionFloatImageFrame(this.depthFloatFrame.Width, this.depthFloatFrame.Height);
                    FusionPointCloudImageFrame pointCloudBuffer        = new FusionPointCloudImageFrame(this.depthFloatFrame.Width, this.depthFloatFrame.Height);
                    FusionColorImageFrame      shadedSurfaceColorFrame = new FusionColorImageFrame(this.depthFloatFrame.Width, this.depthFloatFrame.Height);
                    int[] voxelPixels = new int[this.depthFloatFrame.Width * this.depthFloatFrame.Height];

                    this.volume.DepthToDepthFloatFrame(
                        pDepth,
                        depthFloatBuffer,
                        Config.ServerConfigManager._ServerConfigObject.serverKinectFusionConfig.minDepthClip,
                        Config.ServerConfigManager._ServerConfigObject.serverKinectFusionConfig.maxDepthClip,
                        false);
                    float alignmentValue;
                    bool  trackingSucceeded = this.volume.ProcessFrame(depthFloatBuffer,
                                                                       Config.ServerConfigManager._ServerConfigObject.serverKinectFusionConfig.iterationCount,
                                                                       Config.ServerConfigManager._ServerConfigObject.serverKinectFusionConfig.integrationWeight,
                                                                       out alignmentValue,
                                                                       volume.GetCurrentWorldToCameraTransform());

                    // If camera tracking failed, no data integration or raycast for reference
                    // point cloud will have taken place, and the internal camera pose
                    // will be unchanged.
                    if (!trackingSucceeded)
                    {
                        trackingErrorCount++;
                    }
                    else
                    {
                        Matrix4 calculatedCameraPose = volume.GetCurrentWorldToCameraTransform();

                        // Set the camera pose and reset tracking errors
                        worldToCameraTransform = calculatedCameraPose;
                        trackingErrorCount     = 0;
                    }

                    // Calculate the point cloud
                    volume.CalculatePointCloud(pointCloudBuffer, worldToCameraTransform);

                    // Shade point cloud and render
                    FusionDepthProcessor.ShadePointCloud(
                        pointCloudBuffer,
                        worldToCameraTransform,
                        null,
                        shadedSurfaceColorFrame
                        );

                    shadedSurfaceColorFrame.CopyPixelDataTo(voxelPixels);

                    bitmap.WritePixels(
                        new Int32Rect(0, 0, bitmap.PixelWidth, bitmap.PixelHeight),
                        voxelPixels,
                        bitmap.PixelWidth * sizeof(int),
                        0);

                    bitmap.Freeze();
                    OnNewFusionPictureEvent.BeginInvoke(pKdp.usedConfig.ID, bitmap, null, null);

                    picturesIntegrated++;
                    Log.LogManager.writeLogDebug("[DataIntegration:Reconstruction] " + picturesIntegrated + " of " + pKdp.rawDepthData.Count + " Pictures integrated");
                }

                // If the request was a calibration request, export the mesh
                if (pKdp.usedConfig.clientRequestObject.requestType == ClientConfigObject.RequestType.calibration)
                {
                    exportMesh(volume, pKdp, false);
                    Log.LogManager.writeLog("[DataIntegration:Reconstruction] Mesh of " + pKdp.usedConfig.name + " exported.");
                    return;
                }

                //broadcast new point cloud
                PointCloud p = new PointCloud(volume);
                p.ConfigObject = pKdp.usedConfig;
                OnNewPointCloudEvent.BeginInvoke(p, null, null);
                Log.LogManager.writeLog("[DataIntegration:Reconstruction] All pictures of " + pKdp.usedConfig.name + " integrated");
                Log.LogManager.updateAlgorithmStatus("Done");
            }
        }
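The calibration branch above hands the volume to exportMesh; that method is not part of this snippet. A hedged sketch of what such an export could look like with the Fusion mesh API, writing a Wavefront .obj file (the signature and file handling here are illustrative, not the original helper):

        void ExportMeshToObj(Reconstruction volume, string path)
        {
            // Extract a triangle mesh from the volume at full voxel resolution (step 1)
            Mesh mesh = volume.CalculateMesh(1);
            var vertices = mesh.GetVertices();
            var indices  = mesh.GetTriangleIndexes();

            using (var writer = new System.IO.StreamWriter(path))
            {
                foreach (var v in vertices)
                {
                    // Flip Y and Z so the mesh appears upright in most viewers
                    writer.WriteLine("v {0} {1} {2}", v.X, -v.Y, -v.Z);
                }

                // .obj indices are 1-based; Fusion lists triangle indices in groups of three
                for (int i = 0; i < indices.Count; i += 3)
                {
                    writer.WriteLine("f {0} {1} {2}", indices[i] + 1, indices[i + 1] + 1, indices[i + 2] + 1);
                }
            }
        }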