/// <summary>
/// Wires the Kinect pipeline up for the supplied model and image target:
/// locates the first connected sensor, configures the individual streams,
/// starts the device and subscribes to the combined frame event.
/// </summary>
/// <param name="mod">Model that consumes the sensor data.</param>
/// <param name="img">Image control used for rendering.</param>
/// <returns>0 on success, -1 when no connected sensor was found.</returns>
public int setupKinect(Model mod, Image img)
{
    theModel = mod;
    theImage = img;

    // Keep the first sensor that reports Connected.
    foreach (var candidate in KinectSensor.KinectSensors)
    {
        if (candidate.Status == KinectStatus.Connected)
        {
            this.sensor = candidate;
            this.foundSensor = true;
            break;
        }
    }

    if (!this.foundSensor)
    {
        return -1;
    }

    // Stream setup happens before Start(); audio is deliberately started
    // only once the sensor is running.
    this.setupSkeleton();
    this.setupColor();
    this.setupDepth();
    this.sensor.Start();
    this.setupAudio();
    this.sensor.AllFramesReady += sensor_AllFramesReady;
    return 0;
}
/// <summary>
/// Reacts to plug/unplug and power events from the Kinect runtime, updating
/// the user-facing status message and (re)initializing the sensor as needed.
/// </summary>
void KinectSensorsStatusChanged(object sender, StatusChangedEventArgs e)
{
    switch (e.Status)
    {
        case KinectStatus.Disconnected:
            // Only tear down when it is the sensor we are actually using.
            if (_kinectSensor == e.Sensor)
            {
                Clean();
                Message = "Kinect disconnected";
            }
            break;

        case KinectStatus.Connected:
            _kinectSensor = e.Sensor;
            Initialize();
            break;

        case KinectStatus.NotPowered:
            // Message first, then cleanup (order kept from the original).
            Message = "Kinect is not powered";
            Clean();
            break;

        case KinectStatus.NotReady:
            Message = "Kinect is not ready";
            break;

        case KinectStatus.Initializing:
            Message = "Initializing";
            break;

        default:
            Message = string.Concat("Status: ", e.Status);
            break;
    }
}
/// <summary>
/// Creates the dance controller: stores the layout/UI references, grabs the
/// first available Kinect, starts it and enables color plus smoothed
/// skeleton tracking before hooking the skeleton-frame event.
/// </summary>
public KinectDance(double layoutHeight, double layoutWidth, List<TextBlock> menus,
                   Style mouseOverStyle, Border menuBorder, TextBox debugBox = null)
{
    _layoutHeight = layoutHeight;
    _layoutWidth = layoutWidth;
    _debugBox = debugBox;
    _menus = menus;
    _menuBorder = menuBorder;
    _mouseOverStyle = mouseOverStyle;

    _kinect = KinectSensor.KinectSensors.FirstOrDefault();
    if (_kinect == null)
    {
        // No device attached; the UI keeps working without Kinect input.
        return;
    }

    //_kinect.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
    _kinect.Start();
    _kinect.ColorStream.Enable();

    // Smoothing trades a little latency for joint stability.
    var smoothing = new TransformSmoothParameters
    {
        Smoothing = 0.7f,
        Correction = 0.3f,
        Prediction = 0.4f,
        JitterRadius = 0.5f,
        MaxDeviationRadius = 0.5f
    };
    _kinect.SkeletonStream.Enable(smoothing);
    _kinect.SkeletonFrameReady += kinect_SkeletonFrameReady;
}
/// <summary>
/// Registers this handler as the shared instance, acquires the default
/// Kinect v2 sensor, subscribes to coordinate-mapper changes and opens it.
/// </summary>
public KinectHandler()
{
    instance = this;
    kinectSensor = KinectSensor.GetDefault();
    // Subscribe before Open() so the first mapping change is not missed.
    kinectSensor.CoordinateMapper.CoordinateMappingChanged +=
        CoordinateMapper_CoordinateMappingChanged;
    kinectSensor.Open();
}
/// <summary>
/// Hooks skeleton tracking on a freshly plugged sensor, then enables the
/// skeleton stream and the sensor itself through the manager.
/// </summary>
private void startKinectService(KinectSensorManager kinectSensorManager,
                                Microsoft.Kinect.KinectSensor pluggedKinectSensor)
{
    // Subscribe first so no frames are missed once the stream is enabled.
    pluggedKinectSensor.SkeletonFrameReady += this.skeletonTracking;
    kinectSensorManager.SkeletonStreamEnabled = true;
    kinectSensorManager.KinectSensorEnabled = true;
}
/// <summary>
/// KinectSensorChooser callback: disposes the previously used sensor and
/// configures the newly attached one for infrared capture (delivered through
/// the color stream in the v1 SDK), rendered into a Gray16 bitmap.
/// </summary>
void myChooser_KinectChanged(object sender, KinectChangedEventArgs e)
{
    if (null != e.OldSensor)
    {
        // Deactivate the old Kinect.
        // NOTE(review): this disposes our cached sensor rather than e.OldSensor —
        // presumably the same instance; confirm against the chooser usage.
        if (mySensor != null)
        {
            mySensor.Dispose();
        }
    }
    if (null != e.NewSensor)
    {
        mySensor = e.NewSensor;
        // Infrared frames come through the color stream at 640x480/30fps.
        mySensor.ColorStream.Enable(ColorImageFormat.InfraredResolution640x480Fps30);
        myColorArray = new byte[this.mySensor.ColorStream.FramePixelDataLength];
        // NOTE(review): bitmap dimensions are taken from DepthStream, which is
        // never enabled here — verify FrameWidth/FrameHeight are valid at this point.
        myBitmap = new WriteableBitmap(this.mySensor.DepthStream.FrameWidth, this.mySensor.DepthStream.FrameHeight, 96.0, 96.0, PixelFormats.Gray16, null);
        image1.Source = myBitmap;
        mySensor.AllFramesReady += new EventHandler<AllFramesReadyEventArgs>(mySensor_AllFramesReady);
        try
        {
            this.mySensor.Start();
            SensorChooserUI.Visibility = Visibility.Hidden;
        }
        catch (IOException)
        {
            // Another process owns the sensor; drop our reference.
            this.mySensor = null;
        }
    }
}
/// <summary>
/// Converts a depth frame into a BGR32 color buffer: player pixels are painted
/// white; non-player pixels are color-coded by range (red = unknown,
/// green = too near, blue = too far, yellow = valid depth).
/// </summary>
/// <param name="kinect">Sensor whose color/depth streams define the formats and sizes.</param>
/// <param name="depthFrame">The depth frame to convert.</param>
/// <returns>BGR32 buffer of depthFrame.PixelDataLength * Bgr32BytesPerPixel bytes.</returns>
private byte[] ConvertDepthColor( KinectSensor kinect, DepthImageFrame depthFrame )
{
    ColorImageStream colorStream = kinect.ColorStream;
    DepthImageStream depthStream = kinect.DepthStream;

    // Grab the raw per-pixel data of the depth camera.
    short[] depthPixel = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo( depthPixel );

    // Get the RGB-camera coordinate corresponding to each depth pixel
    // (registration between the two cameras).
    ColorImagePoint[] colorPoint = new ColorImagePoint[depthFrame.PixelDataLength];
    kinect.MapDepthFrameToColorFrame( depthStream.Format, depthPixel, colorStream.Format, colorPoint );

    byte[] depthColor = new byte[depthFrame.PixelDataLength * Bgr32BytesPerPixel];
    for ( int index = 0; index < depthPixel.Length; index++ )
    {
        // Split the raw value into player index (low bits) and distance in mm.
        int player = depthPixel[index] & DepthImageFrame.PlayerIndexBitmask;
        int distance = depthPixel[index] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        // The mapped coordinate may exceed the frame size, so clamp to the edge.
        int x = Math.Min( colorPoint[index].X, colorStream.FrameWidth - 1 );
        int y = Math.Min( colorPoint[index].Y, colorStream.FrameHeight - 1 );

        // NOTE(review): the stride uses depthFrame.Width while x/y are color
        // coordinates — only valid while both streams share the same resolution.
        int colorIndex = ((y * depthFrame.Width) + x) * Bgr32BytesPerPixel;

        if ( player != 0 )
        {
            // Player pixel: white (B, G, R all 255).
            depthColor[colorIndex] = 255;
            depthColor[colorIndex + 1] = 255;
            depthColor[colorIndex + 2] = 255;
        }
        else
        {
            // Out of supported range, 0-40cm: red.
            if ( distance == depthStream.UnknownDepth )
            {
                depthColor[colorIndex] = 0;
                depthColor[colorIndex + 1] = 0;
                depthColor[colorIndex + 2] = 255;
            }
            // Too near, 40cm-80cm (default mode): green.
            else if ( distance == depthStream.TooNearDepth )
            {
                depthColor[colorIndex] = 0;
                depthColor[colorIndex + 1] = 255;
                depthColor[colorIndex + 2] = 0;
            }
            // Too far, beyond 3m (Near) / 4m (Default) up to 8m: blue.
            else if ( distance == depthStream.TooFarDepth )
            {
                depthColor[colorIndex] = 255;
                depthColor[colorIndex + 1] = 0;
                depthColor[colorIndex + 2] = 0;
            }
            // Valid depth data: yellow.
            else
            {
                depthColor[colorIndex] = 0;
                depthColor[colorIndex + 1] = 255;
                depthColor[colorIndex + 2] = 255;
            }
        }
    }
    return depthColor;
}
/// <summary>
/// Loaded handler: finds a connected sensor, enables 640x480 depth and color
/// streams, binds the color bitmap to the image control and starts streaming.
/// </summary>
void Cage4_Loaded(object sender, RoutedEventArgs e)
{
    foreach (var candidate in KinectSensor.KinectSensors)
    {
        if (candidate.Status == KinectStatus.Connected)
        {
            this.sensor = candidate;
            break;
        }
    }

    if (this.sensor == null)
    {
        return;
    }

    this.sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
    this.sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);

    // Buffer and bitmap backing the on-screen color image.
    this.colorPixels = new byte[this.sensor.ColorStream.FramePixelDataLength];
    this.colorBitmap = new WriteableBitmap(this.sensor.ColorStream.FrameWidth,
        this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);
    this.cimg_cage4.Source = this.colorBitmap;

    this.sensor.AllFramesReady += this.sensor_AllFramesReady;
    try
    {
        this.sensor.Start();
    }
    catch (IOException)
    {
        // Sensor already in use; the stale reference is kept, as before.
    }
}
/// <summary>
/// Toggles the sensor from the start/stop button: on "start", grabs the first
/// Kinect, starts it, enables all streams and updates the UI; on "stop",
/// stops a running sensor and resets the button.
/// </summary>
private void Start_Click(object sender, RoutedEventArgs e)
{
    if (this.StartStopButton.Content.ToString() == ButtonStartText)
    {
        if (KinectSensor.KinectSensors.Any())
        {
            KinectSensor.KinectSensors.StatusChanged += (o, args) => { this.Status.Content = args.Status.ToString(); };
            sensor = KinectSensor.KinectSensors.First();
        }

        // BUGFIX: previously execution fell through to sensor.Start() even when
        // no Kinect was attached, throwing a NullReferenceException.
        if (sensor == null)
        {
            this.Status.Content = "No Kinect sensor detected.";
            return;
        }

        sensor.Start();
        sensor.ElevationAngle = 0;
        sensor.ColorStream.Enable();
        sensor.DepthStream.Enable();
        sensor.SkeletonStream.Enable();
        sensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Default; // Default is standing
        sensor.AllFramesReady += sensorAllFramesReady;
        this.ConnectionID.Content = sensor.DeviceConnectionId;
        this.StartStopButton.Content = ButtonStopText;
    }
    else
    {
        if (sensor != null && sensor.IsRunning)
        {
            sensor.Stop();
            StartStopButton.Content = ButtonStartText;
        }
    }
}
/// <summary>
/// Initializes the window: opens the default Kinect v2 sensor, allocates the
/// body buffer and creates color/body/face readers with their event handlers.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();
    _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

    _colorReader = _sensor.ColorFrameSource.OpenReader();
    _colorReader.FrameArrived += ColorReader_FrameArrived;

    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    // 2) Initialize the face source with the desired features.
    const FaceFrameFeatures features =
        FaceFrameFeatures.BoundingBoxInColorSpace |
        FaceFrameFeatures.FaceEngagement |
        FaceFrameFeatures.Glasses |
        FaceFrameFeatures.Happy |
        FaceFrameFeatures.LeftEyeClosed |
        FaceFrameFeatures.MouthOpen |
        FaceFrameFeatures.PointsInColorSpace |
        FaceFrameFeatures.RightEyeClosed |
        FaceFrameFeatures.LookingAway;
    _faceSource = new FaceFrameSource(_sensor, 0, features);
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += FaceReader_FrameArrived;
}
/// <summary>
/// Initializes skeletal tracking on the first attached Kinect and tilts the
/// camera to the requested angle. Logs and returns quietly when no sensor is
/// attached.
/// </summary>
/// <param name="elevationAngle">Desired tilt in degrees; clamped to the motor's ±26° range.</param>
public void initialize( int elevationAngle = 0 )
{
    try
    {
        kinectSensor = KinectSensor.KinectSensors[0];
    }
    catch (Exception e)
    {
        Console.WriteLine("kinect not detected, continuing with kinect disabled {0}", e);
        return;
    }
    if (kinectSensor == null)
    {
        return;
    }

    // Clamp to ±26° to keep the motor inside its supported range
    // (Math.Min/Max replaces the original if/else ladder; same result).
    elevationAngle = Math.Max(-26, Math.Min(26, elevationAngle));

    // Only initializes Skeletal Tracking.
    kinectSensor.SkeletonStream.Enable();

    // Callback invoked as skeleton frames arrive.
    kinectSensor.SkeletonFrameReady += new EventHandler<SkeletonFrameReadyEventArgs>(kinectSkeletonFrameReadyCallback);
    kinectSensor.Start();
    kinectSensor.ElevationAngle = elevationAngle;
}
/// <summary>
/// KinectSensorChooser callback: disposes the previous sensor and prepares
/// the new one for depth capture, allocating the working buffers used by the
/// frame handler (three rolling depth arrays, a histogram and a result array).
/// </summary>
void myChooser_KinectChanged(object sender, KinectChangedEventArgs e)
{
    if (null != e.OldSensor)
    {
        // Deactivate the old Kinect.
        // NOTE(review): disposes the cached sensor rather than e.OldSensor —
        // presumably the same instance; confirm against the chooser usage.
        if (mySensor != null)
        {
            mySensor.Dispose();
        }
    }
    if (null != e.NewSensor)
    {
        mySensor = e.NewSensor;
        mySensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
        // Three rolling depth buffers plus the combined result buffer.
        myDArray1 = new short[this.mySensor.DepthStream.FramePixelDataLength];
        myDArray2 = new short[this.mySensor.DepthStream.FramePixelDataLength];
        myDArray3 = new short[this.mySensor.DepthStream.FramePixelDataLength];
        // 50-bin histogram used by the frame handler.
        myHistoArray = new int[50];
        myFinalArray = new short[this.mySensor.DepthStream.FramePixelDataLength];
        mySensor.AllFramesReady += new EventHandler<AllFramesReadyEventArgs>(mySensor_AllFramesReady);
        try
        {
            this.mySensor.Start();
            SensorChooserUI.Visibility = Visibility.Hidden;
        }
        catch (IOException)
        {
            // Sensor is in use elsewhere; drop the reference.
            this.mySensor = null;
        }
    }
}
/// <summary>
/// Console entry point: initializes the app, wires the first Kinect for
/// near-range skeleton tracking, arms a 500 ms polling timer, then blocks
/// until the user presses Enter.
/// </summary>
static void Main()
{
    Init();
    try
    {
        logic = new Logic();

        sensor = KinectSensor.KinectSensors[0];
        sensor.Start();
        sensor.SkeletonStream.Enable();
        sensor.SkeletonStream.EnableTrackingInNearRange = true;

        timer = new Timer();
        timer.Interval = 500;
        timer.Elapsed += Timer_Elapsed;
        timer.Enabled = true;
    }
    catch (Exception ex)
    {
        // Reached when no sensor is attached (indexer throws) or startup fails.
        Console.WriteLine(ex);
    }
    Console.ReadLine();
}
/// <summary>
/// Sets up a Kinect v2 wrapper: binds the per-sensor settings, sizes the
/// image object pools from the sensor's frame descriptions and launches the
/// Kinect either directly (GUI) or via an async delegate (headless).
/// </summary>
public KinectCoreV2(ref KinectBase.MasterSettings settings, bool isGUILaunched, int kinectNumber)
{
    masterSettings = settings;

    // The options list holds heterogeneous settings objects; go through
    // 'dynamic' to cast the entry for this sensor to KinectV2Settings.
    dynamic temp = masterSettings.kinectOptionsList[kinectNumber];
    masterKinectSettings = (KinectV2Settings)temp;

    //TODO: Update this to open a specific Kinect v2, if the SDK is ever updated to support multiple on one machine
    kinect = KinectSensor.GetDefault();
    kinectID = kinectNumber;

    // Pool buffer sizes: 4 bytes per pixel for color/depth rendering,
    // 16-bit samples for infrared.
    uint tempC = kinect.ColorFrameSource.FrameDescription.LengthInPixels;
    uint tempD = kinect.DepthFrameSource.FrameDescription.LengthInPixels;
    uint tempI = kinect.InfraredFrameSource.FrameDescription.LengthInPixels;
    colorImagePool = new KinectBase.ObjectPool<byte[]>(() => new byte[tempC * 4]);
    depthImagePool = new KinectBase.ObjectPool<byte[]>(() => new byte[tempD * 4]);
    irImagePool = new KinectBase.ObjectPool<byte[]>(() => new byte[tempI * sizeof(UInt16)]);

    if (isGUILaunched)
    {
        isGUI = true;
        LaunchKinect();
    }
    else
    {
        launchKinectDelegate kinectDelegate = LaunchKinect;
        IAsyncResult result = kinectDelegate.BeginInvoke(null, null);
        kinectDelegate.EndInvoke(result);  //Even though this is blocking, the events should be on a different thread now.
    }
}
/// <summary>
/// Default constructor: starts disconnected and wires up the audio
/// controller, the default Kinect sensor handle and the database manager.
/// </summary>
public MathiasCore()
{
    CONNECTED = false;
    KinectAudio = new KinectAudioController();
    kinect = KinectSensor.GetDefault();
    dbManager = new DBManager();
}
/// <summary>
/// Finds the first attached Kinect, enables skeleton + depth streams and the
/// interaction stream, wires the frame events and starts the sensor.
/// Best-effort: startup failures are ignored so the UI stays responsive.
/// </summary>
private void initKinect()
{
    if (KinectSensor.KinectSensors.Count > 0)
    {
        kinect = KinectSensor.KinectSensors[0];
    }
    else
    {
        info.Content = "Pas de kinect connectée";
        // BUGFIX: previously execution fell through with a null 'kinect'; the
        // resulting NullReferenceException was silently swallowed below.
        return;
    }
    try
    {
        kinect.SkeletonStream.Enable();
        kinect.DepthStream.Enable();
        interStream = new InteractionStream(kinect, new DummyInteraction());
        kinect.DepthFrameReady += KinectOnDepthFrameReady;
        kinect.SkeletonFrameReady += KinectOnSkeletonFrameReady;
        interStream.InteractionFrameReady += interStream_InteractionFrameReady;
        kinect.Start();
    }
    catch (Exception)
    {
        // Best effort: the sensor may be in use by another process.
        // NOTE(review): consider surfacing this in 'info' instead of ignoring it.
    }
}
/// <summary>
/// Starts the first attached Kinect with color and smoothed skeleton streams,
/// or shows a message box when no device is present.
/// </summary>
private void startKinect()
{
    if (KinectSensor.KinectSensors.Count == 0)
    {
        MessageBox.Show("No Kinect Device found.");
        return;
    }

    // Choose the first Kinect device.
    kinect = KinectSensor.KinectSensors[0];
    if (kinect == null)
    {
        return;
    }

    kinect.ColorStream.Enable();

    // Smoothing parameters for skeleton tracking.
    var smoothing = new TransformSmoothParameters
    {
        Smoothing = 0.5f,
        Correction = 0.5f,
        Prediction = 0.5f,
        JitterRadius = 0.05f,
        MaxDeviationRadius = 0.04f
    };
    kinect.SkeletonStream.Enable(smoothing);

    // Start Kinect device.
    kinect.Start();
}
/// <summary>
/// Starts up Kinect: hooks skeleton frames with heavy smoothing, allocates
/// the skeleton buffer and starts the sensor. On any failure a retry timer
/// is armed to try again in 5 seconds.
/// </summary>
void StartKinect()
{
    try
    {
        kinect = KinectSensor.KinectSensors.FirstOrDefault();
        kinect.SkeletonFrameReady += new EventHandler<SkeletonFrameReadyEventArgs>(runtime_SkeletonFrameReady);

        var smoothing = new TransformSmoothParameters()
        {
            Smoothing = 0.75f,
            Correction = 0.0f,
            Prediction = 0.0f,
            JitterRadius = 0.05f,
            MaxDeviationRadius = 0.4f
        };
        kinect.SkeletonStream.Enable(smoothing);

        this.skeletonData = new Skeleton[kinect.SkeletonStream.FrameSkeletonArrayLength];
        kinect.Start();
        Trace.WriteLine("Kinect initialized");
    }
    catch (Exception)
    {
        // Also reached when no sensor is attached ('kinect' is null above).
        Trace.WriteLine("Error while initializing Kinect. Trying again in 5 seconds...");
        kinectRetryTimer.Start();
    }
}
//Runtime nui;
/// <summary>
/// Creates the Kinect wrapper: builds an empty skeleton model, grabs the
/// first connected sensor, starts it, enables the skeleton stream and
/// registers the frame-ready handler.
/// </summary>
public Kinect()
{
    skeleton = new skeleton(0);

    // Pick the first sensor that reports Connected.
    foreach (KinectSensor candidate in KinectSensor.KinectSensors)
    {
        if (candidate.Status == KinectStatus.Connected)
        {
            kinectSensor = candidate;
            break;
        }
    }

    if (kinectSensor == null)
    {
        return;
    }
    Console.WriteLine(kinectSensor.Status);

    // Start the device and enable the skeleton data stream.
    kinectSensor.Start();
    kinectSensor.SkeletonStream.Enable();

    // Register the handler invoked whenever a skeleton frame is ready.
    kinectSensor.SkeletonFrameReady += new EventHandler<SkeletonFrameReadyEventArgs>(skeletonFrameReady);
}
/// <summary>
/// Window-loaded handler: grabs the first Kinect, enables 640x480 RGB color,
/// starts the device and then subscribes to color frames.
/// </summary>
private void Window_Loaded_1(object sender, RoutedEventArgs e)
{
    miKinect = KinectSensor.KinectSensors[0];
    miKinect.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
    miKinect.Start();
    miKinect.ColorFrameReady += miKinect_ColorFrameReady;
}
/// <summary>
/// Creates the Kinect control layer: stores the UI references, configures the
/// first available sensor for color + smoothed skeleton tracking, then builds
/// the gesture recognizer and hooks the hover button.
/// </summary>
public KinectControl(HoverButton kinectButton, double layoutHeight, double layoutWidth,
                     List<Button> buttons, TextBox debugBox = null)
{
    _kinectButton = kinectButton;
    _layoutHeight = layoutHeight;
    _layoutWidth = layoutWidth;
    _buttons = buttons;
    _debugBox = debugBox;

    _kinect = KinectSensor.KinectSensors.FirstOrDefault();
    if (_kinect != null)
    {
        _kinect.Start();
        _kinect.ColorStream.Enable();

        // Tight jitter/deviation radii favour precise cursor control.
        var smoothing = new TransformSmoothParameters
        {
            Smoothing = 0.7f,
            Correction = 0.1f,
            Prediction = 0.1f,
            JitterRadius = 0.05f,
            MaxDeviationRadius = 0.05f
        };
        _kinect.SkeletonStream.Enable(smoothing);
        _kinect.SkeletonFrameReady += kinect_SkeletonFrameReady;
    }

    _activeRecognizer = CreateRecognizer();
    _kinectButton.Click += KinectButton_Click;
}
/// <summary>
/// Shuts the sensor down when the window closes and clears the reference.
/// </summary>
private void Window_Closing( object sender, System.ComponentModel.CancelEventArgs e )
{
    if (kinect == null)
    {
        return;
    }
    kinect.Close();
    kinect = null;
}
/// <summary>
/// Finds a connected Kinect (the last one enumerated, matching the original
/// no-break loop), enables its skeleton stream and starts it.
/// </summary>
/// <returns>The started sensor, or null when none could be started.</returns>
public KinectSensor connexion_kinect()
{
    // Note: no break — if several sensors are connected, the last one wins.
    foreach (var candidate in KinectSensor.KinectSensors)
    {
        if (candidate.Status == KinectStatus.Connected)
        {
            sensor = candidate;
        }
    }

    if (sensor == null)
    {
        return sensor;
    }

    // Turn on the skeleton stream to receive skeleton frames.
    sensor.SkeletonStream.Enable();

    // Start the sensor!
    try
    {
        sensor.Start();
    }
    catch (IOException)
    {
        return sensor = null;
    }
    return sensor;
}
/// <summary>
/// KinectSensorChooser callback: disposes the previous sensor and starts
/// audio capture on the new one, tracking beam and sound-source angle changes
/// which are visualised on a 640x480 bitmap.
/// </summary>
void myChooser_KinectChanged(object sender, KinectChangedEventArgs e)
{
    if (null != e.OldSensor)
    {
        // Deactivate the old Kinect.
        // NOTE(review): disposes the cached sensor rather than e.OldSensor —
        // presumably the same instance; confirm against the chooser usage.
        if (mySensor != null)
        {
            mySensor.Dispose();
        }
    }
    if (null != e.NewSensor)
    {
        mySensor = e.NewSensor;
        // NOTE(review): AudioSource.Start() is called before mySensor.Start();
        // the v1 SDK guidance is to start audio after the sensor — verify.
        mySensor.AudioSource.Start();
        mySensor.AudioSource.BeamAngleChanged += new EventHandler<BeamAngleChangedEventArgs>(AudioSource_BeamAngleChanged);
        mySensor.AudioSource.SoundSourceAngleChanged += new EventHandler<SoundSourceAngleChangedEventArgs>(AudioSource_SoundSourceAngleChanged);
        // 640x480 canvas used to visualise the audio angles.
        myBitmap = new WriteableBitmap(640,480, 96.0, 96.0, PixelFormats.Pbgra32, null);
        image1.Source = myBitmap;
        try
        {
            this.mySensor.Start();
            SensorChooserUI.Visibility = Visibility.Hidden;
        }
        catch (IOException)
        {
            // Sensor is in use elsewhere; drop the reference.
            this.mySensor = null;
        }
    }
}
/// <summary>
/// Form constructor: opens the output device, acquires the first Kinect,
/// subscribes to the combined frame event, enables skeleton tracking and
/// starts the sensor. Any failure is reported and closes the form.
/// </summary>
public Form1()
{
    InitializeComponent();
    outputDevice.Open();
    try
    {
        if (KinectSensor.KinectSensors.Count == 0)
        {
            // User-facing message (Japanese): "Kinect is not connected".
            throw new Exception("Kinectが接続されていません");
        }

        // Grab the Kinect instance.
        kinect = KinectSensor.KinectSensors[0];

        // Receive a notification for every frame update.
        kinect.AllFramesReady += new EventHandler<AllFramesReadyEventArgs>(kinect_AllFramesReady);

        // Enable skeleton tracking.
        kinect.SkeletonStream.Enable();

        // Begin streaming.
        kinect.Start();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
/// <summary>
/// Window constructor: builds the UI, connects to the active Kinect sensor
/// and prepares the frame reader.
/// </summary>
public MainWindow()
{
    InitializeComponent();
    // Order kept as-is: getReaderReady() presumably depends on _sensor being set.
    _sensor = connectActiveSensor();
    _reader = getReaderReady();
}
/// <summary>
/// Discovers (or re-discovers) a connected Kinect. Drops a stale sensor whose
/// status is no longer Connected, then finds a new one, enables its color
/// stream and allocates the bitmap/buffers backing the color image element.
/// </summary>
private void DiscoverKinectSensor()
{
    if (this._Kinect != null && this._Kinect.Status != KinectStatus.Connected)
    {
        // The sensor went away; forget it so we search for a replacement.
        this._Kinect = null;
    }

    if (this._Kinect != null)
    {
        return;
    }

    // Find the first connected sensor.
    this._Kinect = KinectSensor.KinectSensors.FirstOrDefault(x => x.Status == KinectStatus.Connected);
    if (this._Kinect == null)
    {
        return;
    }

    // Initialize the found sensor.
    this._Kinect.ColorStream.Enable();
    this._Kinect.Start();

    ColorImageStream colorStream = this._Kinect.ColorStream;
    this._ColorImageBitmap = new WriteableBitmap(colorStream.FrameWidth, colorStream.FrameHeight,
        96, 96, PixelFormats.Bgr32, null);
    this._ColorImageBitmapRect = new Int32Rect(0, 0, colorStream.FrameWidth, colorStream.FrameHeight);
    this._ColorImageStride = colorStream.FrameWidth * colorStream.FrameBytesPerPixel;
    this.ColorImageElement.Source = this._ColorImageBitmap;
    this._ColorImagePixelData = new byte[colorStream.FramePixelDataLength];
}
/// <summary>
/// Constructor with database path: remembers the folder, creates the audio
/// controller, grabs the default Kinect and opens the database manager
/// against the given path.
/// </summary>
/// <param name="PathToDatabase">Path to the database folder.</param>
public MathiasCore(string PathToDatabase)
{
    DBPATH = PathToDatabase;
    KinectAudio = new KinectAudioController();
    kinect = KinectSensor.GetDefault();
    dbManager = new DBManager(DBPATH);
}
/// <summary>
/// Builds the helper around the first connected Kinect: optionally switches
/// to seated/near mode, enables skeleton, color and depth streams with the
/// given formats, hooks the combined frame event, starts the sensor and
/// finally creates the face tracker.
/// </summary>
/// <param name="tsp">Skeleton smoothing parameters.</param>
/// <param name="near">true enables seated tracking and the near depth range.</param>
/// <param name="colorFormat">Color stream format (default 1280x960 @ 12fps).</param>
/// <param name="depthFormat">Depth stream format (default 640x480 @ 30fps).</param>
/// <exception cref="Exception">Thrown when no connected sensor is found.</exception>
private KinectHelper(TransformSmoothParameters tsp, bool near = false,
    ColorImageFormat colorFormat = ColorImageFormat.RgbResolution1280x960Fps12,
    DepthImageFormat depthFormat = DepthImageFormat.Resolution640x480Fps30)
{
    _kinectSensor = KinectSensor.KinectSensors.FirstOrDefault(s => s.Status == KinectStatus.Connected);
    if (_kinectSensor == null)
    {
        throw new Exception("No Kinect-Sensor found.");
    }
    if (near)
    {
        // Near mode: track a seated user close to the sensor.
        _kinectSensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
        _kinectSensor.DepthStream.Range = DepthRange.Near;
        _kinectSensor.SkeletonStream.EnableTrackingInNearRange = true;
    }
    DepthImageFormat = depthFormat;
    ColorImageFormat = colorFormat;
    _kinectSensor.SkeletonStream.Enable(tsp);
    _kinectSensor.ColorStream.Enable(colorFormat);
    _kinectSensor.DepthStream.Enable(depthFormat);
    _kinectSensor.AllFramesReady += AllFramesReady;
    _kinectSensor.Start();
    // Face tracking is created after Start(), with color + depth enabled.
    _faceTracker = new FaceTracker(_kinectSensor);
}
/// <summary>
/// Tilt-angle dialog: caches the sensor, reads its current elevation and
/// mirrors it on the slider control.
/// </summary>
public Angulo(KinectSensor miKinect)
{
    InitializeComponent();
    this.miKinect = miKinect;
    angulo = miKinect.ElevationAngle;
    barraAngulo.Value = angulo;
}
/// <summary>
/// Window constructor: locates a connected Kinect, enables 640x480 RGB color
/// with a frame handler and starts streaming; any error is shown to the user
/// and closes the window.
/// </summary>
public MainWindow()
{
    InitializeComponent();
    try
    {
        // Search for a usable (connected) Kinect.
        foreach (var candidate in KinectSensor.KinectSensors)
        {
            if (candidate.Status == KinectStatus.Connected)
            {
                kinect = candidate;
                break;
            }
        }
        if (kinect == null)
        {
            // User-facing message (Japanese): "No usable Kinect available".
            throw new Exception("利用可能なKinectがありません");
        }

        // Enable color (subscribe first so no frames are missed).
        kinect.ColorFrameReady += new EventHandler<ColorImageFrameReadyEventArgs>(kinect_ColorFrameReady);
        kinect.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);

        // Start the Kinect.
        kinect.Start();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
/// <summary>
/// Releases the body-frame reader (unsubscribing its handler) and closes the
/// sensor. Safe to call more than once: every reference is null-checked and
/// cleared.
/// </summary>
public void Dispose()
{
    if (this.BFReader != null)
    {
        this.BFReader.FrameArrived -= OnFrameArrived;
        this.BFReader.Dispose();
        this.BFReader = null;
    }

    // BUGFIX: guard against a null sensor (Dispose called twice, or the sensor
    // was never acquired); the original dereferenced it unconditionally.
    if (this.Kinect != null)
    {
        this.Kinect.Close();
        this.Kinect = null;
    }
}
/// <summary>
/// Opens the default Kinect v2 sensor and creates readers for exactly the
/// frame sources enabled in the configuration (bodies, color, depth,
/// infrared, long-exposure infrared and audio).
/// </summary>
/// <exception cref="ObjectDisposedException">If this instance was disposed.</exception>
private void StartKinect()
{
    if (this.disposed)
    {
        throw new ObjectDisposedException(nameof(KinectSensor));
    }

    this.kinectSensor = Microsoft.Kinect.KinectSensor.GetDefault();
    this.kinectSensor.CoordinateMapper.CoordinateMappingChanged += this.CoordinateMapper_CoordinateMappingChanged;

    // Accumulate the requested frame-source flags from the configuration.
    this.whichFrames = FrameSourceTypes.None;
    if (this.configuration.OutputBodies)
    {
        this.whichFrames |= FrameSourceTypes.Body;
    }
    if (this.configuration.OutputColor)
    {
        this.whichFrames |= FrameSourceTypes.Color;
    }
    if (this.configuration.OutputDepth)
    {
        this.whichFrames |= FrameSourceTypes.Depth;
    }
    if (this.configuration.OutputInfrared)
    {
        this.whichFrames |= FrameSourceTypes.Infrared;
    }
    if (this.configuration.OutputLongExposureInfrared)
    {
        this.whichFrames |= FrameSourceTypes.LongExposureInfrared;
    }

    // One multi-source reader covers all image/body streams.
    if (this.whichFrames != FrameSourceTypes.None)
    {
        this.multiFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(this.whichFrames);
        this.multiFrameReader.MultiSourceFrameArrived += this.MultiFrameReader_FrameArrived;
    }

    // Audio uses its own beam-frame reader.
    if (this.configuration.OutputAudio)
    {
        this.audioBeamFrameReader = this.kinectSensor.AudioSource.OpenReader();
        this.audioBeamFrameReader.FrameArrived += this.AudioBeamFrameReader_FrameArrived;
    }

    this.kinectSensor.Open();
}
/// <summary>
/// Wraps the default Kinect v2 sensor: watches availability changes, arms a
/// 10-second auto-reset update timer and an FPS timer, then initializes.
/// </summary>
internal KinectSensor()
{
    _KinectSensor = Microsoft.Kinect.KinectSensor.GetDefault();
    _KinectSensor.IsAvailableChanged += _KinectSensor_IsAvailableChanged;

    // Periodic maintenance tick every 10 seconds.
    updateTimer = new Timer(10000);
    updateTimer.AutoReset = true;
    updateTimer.Elapsed += updateTimer_Elapsed;

    _FPStimer = new KinectFPStimer();
    _FPStimer.FpsTimerReady += _FPStimer_FpsTimerReady;

    initialize();
}
/// <summary>
/// Builds the producer: prepares the worker thread, remembers the joints of
/// interest, opens the default Kinect v2 sensor, waits briefly for it to
/// become ready and then creates the body-frame reader.
/// </summary>
public KinectProducer(JointType[] jointsofinterest)
{
    WorkerFunction = new ThreadStart(this.StartForWorkerThread);
    WorkerThread = new Thread(WorkerFunction);
    JointsOfInterest = jointsofinterest;

    Kinect = KinectSensor.GetDefault();
    FrameDescription fd = Kinect.DepthFrameSource.FrameDescription;
    Kinect.Open();

    // Give the sensor a moment to transition to the open state.
    Thread.Sleep(200);

    if (this.Kinect.IsOpen)
    {
        BFReader = Kinect.BodyFrameSource.OpenReader();
    }
}
/// <summary>
/// Window constructor: defers Kinect setup to the Loaded event (color and
/// depth streams plus their handlers) and disables both streams on Unloaded.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    this.Unloaded += delegate
    {
        _kinectSensor.ColorStream.Disable();
        _kinectSensor.DepthStream.Disable();
    };

    this.Loaded += delegate
    {
        _kinectSensor = KinectSensor.KinectSensors[0];
        _kinectSensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
        _kinectSensor.DepthStream.Enable(DepthImageFormat.Resolution320x240Fps30);
        _kinectSensor.ColorFrameReady += ColorFrameReady;
        _kinectSensor.DepthFrameReady += DepthFrameReady;
        _kinectSensor.Start();
    };
}
/// <summary>
/// Starts the first attached Kinect v1 in seated near-range mode with color,
/// depth and skeleton streams; the audio capture path exists but is currently
/// disabled (reader thread is created but not started).
/// </summary>
/// <exception cref="ObjectDisposedException">If this instance was disposed.</exception>
private void StartKinect()
{
    if (this.disposed)
    {
        throw new ObjectDisposedException(nameof(KinectSensor));
    }
    if (Microsoft.Kinect.KinectSensor.KinectSensors.Count > 0)
    {
        this.kinectSensor = Microsoft.Kinect.KinectSensor.KinectSensors[0];
        if (kinectSensor.Status == KinectStatus.Connected)
        {
            //TODO use configurations to determine what to enable
            kinectSensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
            kinectSensor.DepthStream.Enable(DepthImageFormat.Resolution320x240Fps30);
            // Near-range, seated tracking: suited to a user close to the sensor.
            kinectSensor.DepthStream.Range = DepthRange.Near;
            kinectSensor.SkeletonStream.EnableTrackingInNearRange = true;
            kinectSensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
            kinectSensor.SkeletonStream.Enable();
            kinectSensor.AllFramesReady += Kinect_AllFramesReady;
            kinectSensor.Start();

            // Start streaming audio!
            //this.audioStream = this.kinectSensor.AudioSource.Start();

            // Use a separate thread for capturing audio because audio stream read operations
            // will block, and we don't want to block main UI thread.
            this.reading = true;
            this.readingThread = new Thread(AudioReadingThread);
            //this.readingThread.Start();
        }
    }
}
/// <summary>
/// Prepares the payloads to send: copies the color and depth bitmap pixels,
/// acquires one fresh depth frame and maps every color pixel into 3D camera
/// space, packing the XYZ floats into a byte array.
/// </summary>
/// <returns>true when all buffers were filled; false if no depth frame was available yet.</returns>
public bool PrepareImageData()
{
    WriteableBitmap colorBitmap = this.ImageSource as WriteableBitmap;
    WriteableBitmap depthBitmap = this.DepthSource as WriteableBitmap;

    this.colorData = new Byte[colorBitmap.PixelHeight * colorBitmap.PixelWidth * bgr32BytesPerPixel];
    this.depthData = new Byte[depthBitmap.PixelHeight * depthBitmap.PixelWidth * gray8BytesPerPixel];
    colorBitmap.CopyPixels(this.colorData, colorBitmap.BackBufferStride, 0);
    depthBitmap.CopyPixels(this.depthData, depthBitmap.BackBufferStride, 0);

    float[] pointTemp = new float[1920 * 1080 * 3]; // X, Y, Z per color pixel
    CameraSpacePoint[] cameraSpacePoints = new CameraSpacePoint[1920 * 1080];
    ushort[] depthData = new ushort[512 * 424];

    Microsoft.Kinect.KinectSensor sensor = Microsoft.Kinect.KinectSensor.GetDefault();

    // BUGFIX: the reader and frame are IDisposable and were previously leaked;
    // AcquireLatestFrame can also return null when no frame is ready yet.
    using (DepthFrameReader reader = sensor.DepthFrameSource.OpenReader())
    using (DepthFrame frame = reader.AcquireLatestFrame())
    {
        if (frame == null)
        {
            return false;
        }
        frame.CopyFrameDataToArray(depthData);
    }

    // Get 3D point coordinates for every color pixel.
    CoordinateMapper coordinateMapper = sensor.CoordinateMapper;
    coordinateMapper.MapColorFrameToCameraSpace(depthData, cameraSpacePoints);

    // Save 3D point coordinates into the flat float array.
    for (int i = 0; i < 1920 * 1080; ++i)
    {
        pointTemp[i * 3] = cameraSpacePoints[i].X;
        pointTemp[i * 3 + 1] = cameraSpacePoints[i].Y;
        pointTemp[i * 3 + 2] = cameraSpacePoints[i].Z;
    }

    depthPointsInColorCoordinate = new byte[1920 * 1080 * 3 * sizeof(float)];
    Buffer.BlockCopy(pointTemp, 0, depthPointsInColorCoordinate, 0, 1920 * 1080 * 3 * sizeof(float));
    return (true);
}
/// <summary>
/// Window constructor: defers Kinect setup to the Loaded event (color and
/// skeleton streams plus a 1 Hz UI tick) and disables both streams on
/// Unloaded.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    this.Unloaded += delegate
    {
        _kinectSensor.ColorStream.Disable();
        _kinectSensor.SkeletonStream.Disable();
    };

    this.Loaded += delegate
    {
        _kinectSensor = Microsoft.Kinect.KinectSensor.KinectSensors[0];
        _kinectSensor.ColorFrameReady += ColorFrameReady;
        _kinectSensor.ColorStream.Enable();
        _kinectSensor.SkeletonFrameReady += Pulse;
        _kinectSensor.SkeletonStream.Enable();

        // One-second UI tick.
        _timer.Interval = new TimeSpan(0, 0, 1);
        _timer.Tick += new EventHandler(_timer_Tick);

        _kinectSensor.Start();
    };
}
/// <summary>
/// Full Kinect bring-up for this window: configures skeleton smoothing, picks
/// the first connected sensor, enables skeleton/depth/color streams, allocates
/// the video bitmap, starts the device and levels the tilt motor.
/// </summary>
void InitKinect()
{
    // parameters used to smooth the skeleton data
    kinect.TransformSmoothParameters parameters = new kinect.TransformSmoothParameters();
    parameters.Smoothing = 0.3f;
    parameters.Correction = 0.3f;
    parameters.Prediction = 0.4f;
    parameters.JitterRadius = 0.05f;
    parameters.MaxDeviationRadius = 0.05f;

    // =============================================================
    // create Kinect device:
    // Look through all sensors and start the first connected one.
    // This requires that a Kinect is connected at the time of app startup.
    // To make your app robust against plug/unplug,
    // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit
    foreach (var potentialSensor in kinect.KinectSensor.KinectSensors)
    {
        if (potentialSensor.Status == kinect.KinectStatus.Connected)
        {
            this.sensor = potentialSensor;
            break;
        }
    }

    if (null != this.sensor)
    {
        // Create the drawing group we'll use for drawing
        this.drawingGroup = new DrawingGroup();

        // Create an image source that we can use in our image control
        this.imageSource = new DrawingImage(this.drawingGroup);

        // Display the drawing using our image control
        //skeletonImage.Source = this.imageSource;

        // Turn on the skeleton stream to receive skeleton frames
        this.sensor.SkeletonStream.Enable(parameters);
        this.sensor.DepthStream.Enable();
        this.sensor.ColorStream.Enable();

        // Add an event handler to be called whenever there is new color frame data
        this.sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;
        //this.sensor.DepthFrameReady += this.SensorDepthFrameReady;
        this.sensor.ColorFrameReady += this.SensorColorFrameReady;

        // Allocate space to put the color pixels we'll receive
        this.colorFramePixels = new byte[this.sensor.ColorStream.FramePixelDataLength];

        // Allocate space to put the depth pixels we'll receive
        //this.depthPixels = new short[this.sensor.DepthStream.FramePixelDataLength];

        // Allocate space to put the color pixels we'll get as result of Depth pixels
        // conversion. One depth pixel will amount to BGR - three color pixels plus one unused.
        //this.colorDepthPixels = new byte[this.sensor.DepthStream.FramePixelDataLength * 4];

        // This is the bitmap we'll display on-screen. To work with bitmap extensions
        // (http://writeablebitmapex.codeplex.com/) must be PixelFormats.Pbgra32
        this.colorBitmapVideo = new WriteableBitmap(this.sensor.ColorStream.FrameWidth, this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Pbgra32, null);
        //this.colorBitmapDepth = new WriteableBitmap(this.sensor.DepthStream.FrameWidth, this.sensor.DepthStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);

        // Set the image we display to point to the bitmap where we'll put the image data
        videoImage.Source = this.colorBitmapVideo;
        //depthImage.Source = this.colorBitmapDepth;

        // Start the sensor!
        try
        {
            this.sensor.Start();
        }
        catch (IOException)
        {
            // Another process already owns the sensor.
            this.sensor = null;
        }
    }

    if (null == this.sensor)
    {
        System.Windows.MessageBox.Show("Runtime initialization failed. Please make sure Kinect device is plugged in.");
    }

    //nui.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color);
    //nui.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution320x240, ImageType.DepthAndPlayerIndex);

    // Level the camera and remember when we last adjusted it.
    lastTime = DateTime.Now;
    isElevationTaskOutstanding = false;
    ElevationAngle = 0;
    EnsureElevationAngle();
}
/// <summary>
/// Constructor of ImageStream, passing the KinectSensor core to this object.
/// All behaviour lives in the base class; no additional state is set here.
/// </summary>
/// <param name="kinectSensor">Shared sensor instance handed to the base class.</param>
public ImageStream(Microsoft.Kinect.KinectSensor kinectSensor) : base(kinectSensor)
{
}
/// <summary>
/// Constructor of InfraredStream, passing the KinectSensor core to the base
/// class. All behaviour lives in the base class; no additional state here.
/// </summary>
/// <param name="kinectSensor">Shared sensor instance handed to the base class.</param>
public InfraredStream(Microsoft.Kinect.KinectSensor kinectSensor) : base(kinectSensor)
{
}
/// <summary>
/// Constructor of DepthStream, passing the KinectSensor core to the base
/// class. All behaviour lives in the base class; no additional state here.
/// </summary>
/// <param name="kinectSensor">Shared sensor instance handed to the base class.</param>
public DepthStream(Microsoft.Kinect.KinectSensor kinectSensor) : base(kinectSensor)
{
}
/// <summary>
/// Constructor of BodyColorStream (the original doc said "BodyStream"),
/// passing the KinectSensor core to the base class; no additional state here.
/// </summary>
/// <param name="kinectSensor">Shared sensor instance handed to the base class.</param>
public BodyColorStream(Microsoft.Kinect.KinectSensor kinectSensor) : base(kinectSensor)
{
}
/// <summary>
/// Recovers approximate color/depth camera intrinsics and the depth-to-color
/// transform from the sensor's own CoordinateMapper: samples a 3D grid of
/// camera-space points, projects them through the SDK mapper to build 2D/3D
/// correspondences, calibrates both cameras against them, then reports the
/// reprojection errors (unless silent).
/// </summary>
internal void RecoverCalibrationFromSensor(Microsoft.Kinect.KinectSensor kinectSensor)
{
    var stopWatch = new System.Diagnostics.Stopwatch();
    stopWatch.Start();

    var objectPoints1 = new List <Vector <double> >();
    var colorPoints1 = new List <System.Drawing.PointF>();
    var depthPoints1 = new List <System.Drawing.PointF>();

    int n = 0;
    // Sample a grid spanning ±2 m laterally and 0.4–4.5 m in depth.
    for (float x = -2f; x < 2f; x += 0.2f)
    {
        for (float y = -2f; y < 2f; y += 0.2f)
        {
            for (float z = 0.4f; z < 4.5f; z += 0.4f)
            {
                var kinectCameraPoint = new CameraSpacePoint();
                kinectCameraPoint.X = x;
                kinectCameraPoint.Y = y;
                kinectCameraPoint.Z = z;

                // use SDK's projection
                // adjust Y to make RH coordinate system that is a projection of Kinect 3D points
                var kinectColorPoint = kinectSensor.CoordinateMapper.MapCameraPointToColorSpace(kinectCameraPoint);
                kinectColorPoint.Y = colorImageHeight - kinectColorPoint.Y;
                var kinectDepthPoint = kinectSensor.CoordinateMapper.MapCameraPointToDepthSpace(kinectCameraPoint);
                kinectDepthPoint.Y = depthImageHeight - kinectDepthPoint.Y;

                // Keep only points that project inside both image planes.
                if ((kinectColorPoint.X >= 0) && (kinectColorPoint.X < colorImageWidth) &&
                    (kinectColorPoint.Y >= 0) && (kinectColorPoint.Y < colorImageHeight) &&
                    (kinectDepthPoint.X >= 0) && (kinectDepthPoint.X < depthImageWidth) &&
                    (kinectDepthPoint.Y >= 0) && (kinectDepthPoint.Y < depthImageHeight))
                {
                    n++;
                    var objectPoint = Vector <double> .Build.Dense(3);
                    objectPoint[0] = kinectCameraPoint.X;
                    objectPoint[1] = kinectCameraPoint.Y;
                    objectPoint[2] = kinectCameraPoint.Z;
                    objectPoints1.Add(objectPoint);

                    var colorPoint = new System.Drawing.PointF();
                    colorPoint.X = kinectColorPoint.X;
                    colorPoint.Y = kinectColorPoint.Y;
                    colorPoints1.Add(colorPoint);

                    //Console.WriteLine(objectPoint[0] + "\t" + objectPoint[1] + "\t" + colorPoint.X + "\t" + colorPoint.Y);

                    var depthPoint = new System.Drawing.PointF();
                    depthPoint.X = kinectDepthPoint.X;
                    depthPoint.Y = kinectDepthPoint.Y;
                    depthPoints1.Add(depthPoint);
                }
            }
        }
    }

    // Seed the color intrinsics with a rough guess (f = 1000, principal point at center).
    this.colorCameraMatrix[0, 0] = 1000; //fx
    this.colorCameraMatrix[1, 1] = 1000; //fy
    this.colorCameraMatrix[0, 2] = colorImageWidth / 2; //cx
    this.colorCameraMatrix[1, 2] = colorImageHeight / 2; //cy
    this.colorCameraMatrix[2, 2] = 1;

    var rotation = Vector <double> .Build.Dense(3);
    var translation = Vector <double> .Build.Dense(3);
    var colorError = CalibrateColorCamera(objectPoints1, colorPoints1, colorCameraMatrix, colorLensDistortion, rotation, translation, this.silent);

    // Build the 4x4 depth-to-color transform from the axis-angle rotation and translation.
    var rotationMatrix = RotationExtensions.AxisAngleToMatrix(rotation);
    this.depthToColorTransform = Matrix <double> .Build.DenseIdentity(4, 4);
    for (int i = 0; i < 3; i++)
    {
        this.depthToColorTransform[i, 3] = translation[i];
        for (int j = 0; j < 3; j++)
        {
            this.depthToColorTransform[i, j] = rotationMatrix[i, j];
        }
    }

    // Seed the depth intrinsics similarly (f = 360).
    this.depthCameraMatrix[0, 0] = 360; //fx
    this.depthCameraMatrix[1, 1] = 360; //fy
    this.depthCameraMatrix[0, 2] = depthImageWidth / 2.0; //cx
    this.depthCameraMatrix[1, 2] = depthImageHeight / 2.0; //cy
    this.depthCameraMatrix[2, 2] = 1;

    var depthError = CalibrateDepthCamera(objectPoints1, depthPoints1, depthCameraMatrix, depthLensDistortion, silent);

    // check projections
    double depthProjectionError = 0;
    double colorProjectionError = 0;
    var testObjectPoint4 = Vector <double> .Build.Dense(4);
    for (int i = 0; i < n; i++)
    {
        var testObjectPoint = objectPoints1[i];
        var testDepthPoint = depthPoints1[i];
        var testColorPoint = colorPoints1[i];

        // "camera space" == depth camera space
        // depth camera projection
        double depthU, depthV;
        Project(depthCameraMatrix, depthLensDistortion, testObjectPoint[0], testObjectPoint[1], testObjectPoint[2], out depthU, out depthV);

        double dx = testDepthPoint.X - depthU;
        double dy = testDepthPoint.Y - depthV;
        depthProjectionError += (dx * dx) + (dy * dy);

        // color camera projection
        testObjectPoint4[0] = testObjectPoint[0];
        testObjectPoint4[1] = testObjectPoint[1];
        testObjectPoint4[2] = testObjectPoint[2];
        testObjectPoint4[3] = 1;

        var color = depthToColorTransform * testObjectPoint4;
        color *= (1.0 / color[3]); // not necessary for this transform

        double colorU, colorV;
        Project(colorCameraMatrix, colorLensDistortion, color[0], color[1], color[2], out colorU, out colorV);

        dx = testColorPoint.X - colorU;
        dy = testColorPoint.Y - colorV;
        colorProjectionError += (dx * dx) + (dy * dy);
    }
    // Mean squared reprojection error over all n samples.
    depthProjectionError /= n;
    colorProjectionError /= n;

    stopWatch.Stop();
    if (!this.silent)
    {
        Console.WriteLine("FakeCalibration :");
        Console.WriteLine("n = " + n);
        Console.WriteLine("color error = " + colorError);
        Console.WriteLine("depth error = " + depthError);
        Console.WriteLine("depth reprojection error = " + depthProjectionError);
        Console.WriteLine("color reprojection error = " + colorProjectionError);
        Console.WriteLine("depth camera matrix = \n" + depthCameraMatrix);
        Console.WriteLine("depth lens distortion = \n" + depthLensDistortion);
        Console.WriteLine("color camera matrix = \n" + colorCameraMatrix);
        Console.WriteLine("color lens distortion = \n" + colorLensDistortion);
        Console.WriteLine(stopWatch.ElapsedMilliseconds + " ms");
        Console.WriteLine("________________________________________________________");
    }
}
/// <summary>
/// Creates a <c>SourceStream</c> bound to the supplied Kinect sensor core.
/// </summary>
/// <param name="kinectSensor">The Kinect sensor this stream will read from.</param>
public SourceStream(Microsoft.Kinect.KinectSensor kinectSensor) => this.sensor = kinectSensor;
/// <summary>
/// Tears down tracking for a Kinect sensor that has been unplugged.
/// </summary>
/// <param name="unpluggedKinectSensor">The sensor that was disconnected.</param>
private void stopKinectService(Microsoft.Kinect.KinectSensor unpluggedKinectSensor)
{
    // Detach the skeleton handler so it is no longer invoked for (and no
    // longer keeps alive) a sensor that is gone.
    unpluggedKinectSensor.SkeletonFrameReady -= this.skeletonTracking;
}
/// <summary>
/// Recovers color and depth camera intrinsics plus the depth-to-color rigid
/// transform by sampling the Kinect SDK's own coordinate mapper over a 3D grid
/// and fitting camera models to the synthetic correspondences ("fake calibration").
/// Results are stored in <c>colorCameraMatrix</c>, <c>colorLensDistortion</c>,
/// <c>depthCameraMatrix</c>, <c>depthLensDistortion</c> and
/// <c>depthToColorTransform</c>; diagnostics are printed unless <c>silent</c>.
/// </summary>
/// <param name="kinectSensor">Open Kinect sensor whose CoordinateMapper provides the ground-truth projections.</param>
internal void RecoverCalibrationFromSensor(Microsoft.Kinect.KinectSensor kinectSensor)
{
    var stopWatch = new System.Diagnostics.Stopwatch();
    if (!this.silent)
    {
        stopWatch.Start();
    }

    var objectPoints1 = new List<Point3D>();
    var colorPoints1 = new List<Point2D>();
    var depthPoints1 = new List<Point2D>();

    int n = 0;
    // Sample a 3D grid of camera-space points and keep those whose SDK
    // projections land inside both the color and the depth image.
    // NOTE(review): the loop counters accumulate float steps, so the exact
    // endpoints drift slightly from round multiples; harmless for sampling.
    for (float x = -2f; x < 2f; x += 0.2f)
    {
        for (float y = -2f; y < 2f; y += 0.2f)
        {
            for (float z = 0.4f; z < 4.5f; z += 0.4f)
            {
                var kinectCameraPoint = new CameraSpacePoint();
                kinectCameraPoint.X = x;
                kinectCameraPoint.Y = y;
                kinectCameraPoint.Z = z;

                // Use the SDK's projection as ground truth.
                // Flip Y to obtain a right-handed coordinate system that is a
                // projection of the Kinect 3D points.
                var kinectColorPoint = kinectSensor.CoordinateMapper.MapCameraPointToColorSpace(kinectCameraPoint);
                kinectColorPoint.Y = colorImageHeight - kinectColorPoint.Y;

                var kinectDepthPoint = kinectSensor.CoordinateMapper.MapCameraPointToDepthSpace(kinectCameraPoint);
                kinectDepthPoint.Y = depthImageHeight - kinectDepthPoint.Y;

                if ((kinectColorPoint.X >= 0) && (kinectColorPoint.X < colorImageWidth) &&
                    (kinectColorPoint.Y >= 0) && (kinectColorPoint.Y < colorImageHeight) &&
                    (kinectDepthPoint.X >= 0) && (kinectDepthPoint.X < depthImageWidth) &&
                    (kinectDepthPoint.Y >= 0) && (kinectDepthPoint.Y < depthImageHeight))
                {
                    n++;
                    objectPoints1.Add(new Point3D(kinectCameraPoint.X, kinectCameraPoint.Y, kinectCameraPoint.Z));
                    colorPoints1.Add(new Point2D(kinectColorPoint.X, kinectColorPoint.Y));
                    depthPoints1.Add(new Point2D(kinectDepthPoint.X, kinectDepthPoint.Y));
                }
            }
        }
    }

    // Initial color-intrinsics guess: fx = fy = 1000, principal point at the
    // image center. (Use 2.0 so the division is floating-point, matching the
    // depth path below instead of silently truncating.)
    var initialColorCameraMatrix = Matrix<double>.Build.Dense(3, 3);
    var initialColorDistortion = Vector<double>.Build.Dense(2);
    initialColorCameraMatrix[0, 0] = 1000; //fx
    initialColorCameraMatrix[1, 1] = 1000; //fy
    initialColorCameraMatrix[0, 2] = colorImageWidth / 2.0; //cx
    initialColorCameraMatrix[1, 2] = colorImageHeight / 2.0; //cy
    initialColorCameraMatrix[2, 2] = 1;

    var colorError = CalibrateCameraIntrinsicsAndExtrinsics(
        objectPoints1,
        colorPoints1,
        initialColorCameraMatrix,
        initialColorDistortion,
        out this.colorCameraMatrix,
        out this.colorLensDistortion,
        out var rotation,
        out var translation,
        this.silent);

    // Assemble the 4x4 rigid depth->color transform from the recovered
    // axis-angle rotation and translation.
    var rotationMatrix = AxisAngleToMatrix(rotation);
    this.depthToColorTransform = Matrix<double>.Build.DenseIdentity(4, 4);
    for (int i = 0; i < 3; i++)
    {
        this.depthToColorTransform[i, 3] = translation[i];
        for (int j = 0; j < 3; j++)
        {
            this.depthToColorTransform[i, j] = rotationMatrix[i, j];
        }
    }

    // Initial depth-intrinsics guess: fx = fy = 360, principal point at center.
    var initialDepthCameraMatrix = Matrix<double>.Build.Dense(3, 3);
    var initialDepthDistortion = Vector<double>.Build.Dense(2);
    initialDepthCameraMatrix[0, 0] = 360; //fx
    initialDepthCameraMatrix[1, 1] = 360; //fy
    initialDepthCameraMatrix[0, 2] = depthImageWidth / 2.0; //cx
    initialDepthCameraMatrix[1, 2] = depthImageHeight / 2.0; //cy
    initialDepthCameraMatrix[2, 2] = 1;

    var depthError = CalibrateCameraIntrinsics(
        objectPoints1,
        depthPoints1,
        initialDepthCameraMatrix,
        initialDepthDistortion,
        out this.depthCameraMatrix,
        out this.depthLensDistortion,
        this.silent);

    // Check projections: reproject every sampled point through the fitted
    // models and accumulate squared pixel error against the SDK projections.
    // NOTE(review): if n == 0 (no grid point visible in both images) the
    // averages below become NaN; assumed unreachable for a working sensor.
    double depthProjectionError = 0;
    double colorProjectionError = 0;
    var testObjectPoint4 = Vector<double>.Build.Dense(4);
    for (int i = 0; i < n; i++)
    {
        var testObjectPoint = objectPoints1[i];
        var testDepthPoint = depthPoints1[i];
        var testColorPoint = colorPoints1[i];

        // "camera space" == depth camera space
        // depth camera projection
        Project(depthCameraMatrix, depthLensDistortion, testObjectPoint, out Point2D projectedDepthPoint);
        double dx = testDepthPoint.X - projectedDepthPoint.X;
        double dy = testDepthPoint.Y - projectedDepthPoint.Y;
        depthProjectionError += (dx * dx) + (dy * dy);

        // color camera projection: lift to homogeneous coordinates, map into
        // color-camera space, then project.
        testObjectPoint4[0] = testObjectPoint.X;
        testObjectPoint4[1] = testObjectPoint.Y;
        testObjectPoint4[2] = testObjectPoint.Z;
        testObjectPoint4[3] = 1;

        var color = depthToColorTransform * testObjectPoint4;
        color *= 1.0 / color[3]; // not necessary for this transform (w stays 1)

        Project(colorCameraMatrix, colorLensDistortion, new Point3D(color[0], color[1], color[2]), out Point2D projectedColorPoint);
        dx = testColorPoint.X - projectedColorPoint.X;
        dy = testColorPoint.Y - projectedColorPoint.Y;
        colorProjectionError += (dx * dx) + (dy * dy);
    }

    // Mean squared reprojection error per point.
    depthProjectionError /= n;
    colorProjectionError /= n;

    if (!this.silent)
    {
        stopWatch.Stop();
        Console.WriteLine("FakeCalibration :");
        Console.WriteLine("n = " + n);
        Console.WriteLine("color error = " + colorError);
        Console.WriteLine("depth error = " + depthError);
        Console.WriteLine("depth reprojection error = " + depthProjectionError);
        Console.WriteLine("color reprojection error = " + colorProjectionError);
        Console.WriteLine("depth camera matrix = \n" + depthCameraMatrix);
        Console.WriteLine("depth lens distortion = \n" + depthLensDistortion);
        Console.WriteLine("color camera matrix = \n" + colorCameraMatrix);
        Console.WriteLine("color lens distortion = \n" + colorLensDistortion);
        Console.WriteLine(stopWatch.ElapsedMilliseconds + " ms");
        Console.WriteLine("________________________________________________________");
    }
}