private void InitializeKinectFusion()
{
    // Initialize Kinect Fusion
    var volParam = new ReconstructionParameters(
        VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ );
    volume = Reconstruction.FusionCreateReconstruction( volParam, ReconstructionProcessor.Amp, -1, Matrix4.Identity );

    // Create the conversion buffers
    depthFloatBuffer = new FusionFloatImageFrame( DepthWidth, DepthHeight );
    pointCloudBuffer = new FusionPointCloudImageFrame( DepthWidth, DepthHeight );
    shadedSurfaceColorFrame = new FusionColorImageFrame( DepthWidth, DepthHeight );

    // Reset the reconstruction volume
    volume.ResetReconstruction( Matrix4.Identity );
}
/// <summary>
/// Execute startup tasks
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void WindowLoaded(object sender, RoutedEventArgs e)
{
    // Look through all sensors and start the first connected one.
    // This requires that a Kinect is connected at the time of app startup.
    // To make your app robust against plug/unplug,
    // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit
    foreach (var potentialSensor in KinectSensor.KinectSensors)
    {
        if (potentialSensor.Status == KinectStatus.Connected)
        {
            this.sensor = potentialSensor;
            break;
        }
    }

    if (null == this.sensor)
    {
        this.statusBarText.Text = Properties.Resources.NoKinectReady;
        return;
    }

    // Turn on the depth and color streams to receive frames
    this.sensor.DepthStream.Enable(DepthImageResolution);
    this.sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);

    this.frameDataLength = this.sensor.DepthStream.FramePixelDataLength;

    // Allocate space to put the color pixels we'll create
    this.colorPixels = new int[this.frameDataLength];

    // This is the bitmap we'll display on-screen
    this.colorBitmap = new WriteableBitmap(
        (int)ImageSize.Width,
        (int)ImageSize.Height,
        96.0,
        96.0,
        PixelFormats.Bgr32,
        null);

    // Set the image we display to point to the bitmap where we'll put the image data
    this.Image.Source = this.colorBitmap;

    // Add event handlers to be called whenever there is new depth or color frame data
    this.sensor.DepthFrameReady += this.SensorDepthFrameReady;
    this.sensor.ColorFrameReady += this.kinect_colorframe_ready;

    var volParam = new ReconstructionParameters(VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ);

    // Set the world-view transform to identity, so the world origin is the initial camera location.
    this.worldToCameraTransform = Matrix4.Identity;

    try
    {
        // This creates a volume cube with the Kinect at center of near plane, and volume directly
        // in front of Kinect.
        this.volume = Reconstruction.FusionCreateReconstruction(volParam, ProcessorType, DeviceToUse, this.worldToCameraTransform);

        this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

        if (this.translateResetPoseByMinDepthThreshold)
        {
            this.ResetReconstruction();
        }
    }
    catch (InvalidOperationException ex)
    {
        this.statusBarText.Text = ex.Message;
        return;
    }
    catch (DllNotFoundException)
    {
        this.statusBarText.Text = Properties.Resources.MissingPrerequisite;
        return;
    }

    // Depth frames generated from the depth input
    this.depthFloatBuffer = new FusionFloatImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

    // Point cloud frames generated from the depth float input
    this.pointCloudBuffer = new FusionPointCloudImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

    // Create images to raycast the Reconstruction Volume
    this.shadedSurfaceColorFrame = new FusionColorImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

    // Start the sensor!
    try
    {
        this.sensor.Start();
    }
    catch (IOException ex)
    {
        // Device is in use
        this.sensor = null;
        this.statusBarText.Text = ex.Message;
        return;
    }
    catch (InvalidOperationException ex)
    {
        // Device is not valid, not supported or hardware feature unavailable
        this.sensor = null;
        this.statusBarText.Text = ex.Message;
        return;
    }

    // Set Near Mode by default
    try
    {
        this.sensor.DepthStream.Range = DepthRange.Near;
        checkBoxNearMode.IsChecked = true;
    }
    catch
    {
        // Device is not near mode capable
    }

    // Initialize and start the FPS timer
    this.fpsTimer = new DispatcherTimer();
    this.fpsTimer.Tick += new EventHandler(this.FpsTimerTick);
    this.fpsTimer.Interval = new TimeSpan(0, 0, FpsInterval);
    this.fpsTimer.Start();

    // Reset the reconstruction
    this.ResetReconstruction();
}
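The depth handler registered in WindowLoaded is what actually drives the fusion pipeline with the buffers allocated above. As a minimal sketch (the ProcessDepthData helper name and its DepthImagePixel[] parameter are assumptions, not part of the listing above; the Fusion calls themselves are the standard Microsoft.Kinect.Fusion API), each frame is converted to a float depth image, aligned and integrated into the volume, then raycast, shaded, and copied into the WriteableBitmap shown in the UI:

// Minimal per-frame sketch; ProcessDepthData is a hypothetical helper called from
// SensorDepthFrameReady with the frame's DepthImagePixel data.
private void ProcessDepthData(DepthImagePixel[] depthImagePixels)
{
    // Convert the raw depth data to a float depth frame (in meters)
    this.volume.DepthToDepthFloatFrame(
        depthImagePixels,
        this.depthFloatBuffer,
        FusionDepthProcessor.DefaultMinimumDepth,
        FusionDepthProcessor.DefaultMaximumDepth,
        false);

    // Track the camera pose and integrate the frame into the volume
    bool trackingSucceeded = this.volume.ProcessFrame(
        this.depthFloatBuffer,
        FusionDepthProcessor.DefaultAlignIterationCount,
        FusionDepthProcessor.DefaultIntegrationWeight,
        this.volume.GetCurrentWorldToCameraTransform());

    if (!trackingSucceeded)
    {
        // Tracking lost for this frame; a real app would count failures and reset
        return;
    }

    this.worldToCameraTransform = this.volume.GetCurrentWorldToCameraTransform();

    // Raycast the volume from the current pose and shade it for display
    this.volume.CalculatePointCloud(this.pointCloudBuffer, this.worldToCameraTransform);
    FusionDepthProcessor.ShadePointCloud(
        this.pointCloudBuffer, this.worldToCameraTransform, this.shadedSurfaceColorFrame, null);

    // Copy the shaded pixels into the on-screen bitmap
    this.shadedSurfaceColorFrame.CopyPixelDataTo(this.colorPixels);
    this.colorBitmap.WritePixels(
        new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
        this.colorPixels,
        this.colorBitmap.PixelWidth * sizeof(int),
        0);
}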
protected virtual void Dispose( bool disposing )
{
    if ( !disposed )
    {
        if ( depthFloatBuffer != null )
        {
            depthFloatBuffer.Dispose();
            depthFloatBuffer = null;
        }

        if ( pointCloudBuffer != null )
        {
            pointCloudBuffer.Dispose();
            pointCloudBuffer = null;
        }

        if ( shadedSurfaceColorFrame != null )
        {
            shadedSurfaceColorFrame.Dispose();
            shadedSurfaceColorFrame = null;
        }

        if ( volume != null )
        {
            volume.Dispose();
            volume = null;
        }

        disposed = true;
    }
}
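Dispose(bool) above is the protected half of the standard IDisposable pattern. A sketch of the public counterpart, assuming the containing window class declares that it implements IDisposable (not shown in the listing above):

// Public entry point that pairs with Dispose(bool) above
public void Dispose()
{
    Dispose( true );

    // No finalizer needs to run once the managed Fusion buffers have been released
    GC.SuppressFinalize( this );
}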
/// <summary>
/// Re-create the reconstruction object
/// </summary>
/// <returns>Indicate success or failure</returns>
private bool RecreateReconstruction()
{
    // Check if sensor has been initialized
    if (null == this.sensor)
    {
        return false;
    }

    if (null != this.volume)
    {
        this.volume.Dispose();
    }

    try
    {
        // The zero-based GPU index to choose for reconstruction processing if the
        // ReconstructionProcessor AMP options are selected.
        // Here we automatically choose a device to use for processing by passing -1.
        int deviceIndex = -1;

        ReconstructionParameters volParam = new ReconstructionParameters(this.voxelsPerMeter, this.voxelsX, this.voxelsY, this.voxelsZ);

        // Set the world-view transform to identity, so the world origin is the initial camera location.
        this.worldToCameraTransform = Matrix4.Identity;

        this.volume = Reconstruction.FusionCreateReconstruction(volParam, ProcessorType, deviceIndex, this.worldToCameraTransform);

        this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

        if (this.translateResetPoseByMinDepthThreshold)
        {
            this.ResetReconstruction();
        }

        // Reset "Pause Integration"
        if (this.PauseIntegration)
        {
            this.PauseIntegration = false;
        }

        return true;
    }
    catch (ArgumentException)
    {
        this.volume = null;
        this.ShowStatusMessage(Properties.Resources.VolumeResolution);
    }
    catch (InvalidOperationException ex)
    {
        this.volume = null;
        this.ShowStatusMessage(ex.Message);
    }
    catch (DllNotFoundException)
    {
        this.volume = null;
        this.ShowStatusMessage(Properties.Resources.MissingPrerequisite);
    }
    catch (OutOfMemoryException)
    {
        this.volume = null;
        this.ShowStatusMessage(Properties.Resources.OutOfMemory);
    }

    return false;
}
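Both WindowLoaded and RecreateReconstruction delegate to ResetReconstruction to re-seat the volume in front of the camera when translateResetPoseByMinDepthThreshold is set. A sketch of that helper, assuming minDepthClip and maxDepthClip fields hold the current depth-threshold values (those fields are not shown in the listings above):

// Sketch of ResetReconstruction; minDepthClip and maxDepthClip are assumed fields.
private void ResetReconstruction()
{
    if (null == this.volume)
    {
        return;
    }

    // Start tracking again from the world origin
    this.worldToCameraTransform = Matrix4.Identity;

    if (this.translateResetPoseByMinDepthThreshold)
    {
        // Shift the volume along Z so reconstruction starts at the minimum depth
        // threshold rather than at the camera itself
        Matrix4 worldToVolumeTransform = this.defaultWorldToVolumeTransform;
        float minDist = (this.minDepthClip < this.maxDepthClip) ? this.minDepthClip : this.maxDepthClip;
        worldToVolumeTransform.M43 -= minDist * this.voxelsPerMeter;

        this.volume.ResetReconstruction(this.worldToCameraTransform, worldToVolumeTransform);
    }
    else
    {
        this.volume.ResetReconstruction(this.worldToCameraTransform);
    }
}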