/// <summary>
/// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader
/// </summary>
/// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param>
/// <param name="gestureResultView">GestureResultView object to store the gesture results of a single body</param>
public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
{
    //LCE = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command");
    //MMIC = new MmiCommunication("localhost", 9876, "User1", "ASR");

    LCE = new LifeCycleEvents("TOUCH", "FUSION", "touch-1", "touch", "command");
    MMIC = new MmiCommunication("localhost", 9876, "User1", "TOUCH"); // CHANGED TO USER1

    //LCE = new LifeCycleEvents("KINECT", "FUSION", "kinect-1", "kinect", "command");
    //MMIC = new MmiCommunication("localhost", 9876, "User1", "KINECT");

    MMIC.Send(LCE.NewContextRequest());

    if (kinectSensor == null)
    {
        throw new ArgumentNullException("kinectSensor");
    }

    if (gestureResultView == null)
    {
        throw new ArgumentNullException("gestureResultView");
    }

    this.GestureResultView = gestureResultView;

    // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
    this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
    this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

    // open the reader for the vgb frames
    this.vgbFrameReader = this.vgbFrameSource.OpenReader();
    if (this.vgbFrameReader != null)
    {
        this.vgbFrameReader.IsPaused = true;
        this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
    }

    // load the gestures from the gesture database
    using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase))
    {
        this.vgbFrameSource.AddGestures(database.AvailableGestures); // load all gestures from the database
    }
}
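The constructor above only wires up the TrackingIdLost and FrameArrived handlers; their bodies are not part of this excerpt. The following is a minimal sketch of what they might look like, modeled on the standard Kinect DiscreteGestureBasics sample. The GestureResultView.UpdateGestureResult(bool, bool, float) call is an assumption about this project's view-model class, and, since the gesture names in the database are not shown here, the sketch simply handles every discrete gesture instead of filtering by name.

// Hedged sketch (not the project's actual code): handlers registered in the constructor above.
// UpdateGestureResult(...) is an assumed member of GestureResultView.
private void Source_TrackingIdLost(object sender, TrackingIdLostEventArgs e)
{
    // the body this detector was watching is gone; reset the result view to "not tracked"
    this.GestureResultView.UpdateGestureResult(false, false, 0.0f);
}

private void Reader_GestureFrameArrived(object sender, VisualGestureBuilderFrameArrivedEventArgs e)
{
    using (VisualGestureBuilderFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        // discrete results are keyed by the Gesture objects loaded from the database
        IReadOnlyDictionary<Gesture, DiscreteGestureResult> discreteResults = frame.DiscreteGestureResults;
        if (discreteResults == null)
        {
            return;
        }

        foreach (Gesture gesture in this.vgbFrameSource.Gestures)
        {
            if (gesture.GestureType == GestureType.Discrete)
            {
                DiscreteGestureResult result = null;
                discreteResults.TryGetValue(gesture, out result);

                if (result != null)
                {
                    // push the detection state and confidence to the bound view model
                    this.GestureResultView.UpdateGestureResult(true, result.Detected, result.Confidence);
                }
            }
        }
    }
}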
/// <summary>
/// Initializes a new instance of the MainWindow class
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                    : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI
    int col0Row = 0;
    int col1Row = 0;
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;

    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        this.gestureDetectorList.Add(detector);

        // split gesture results across the first two columns of the content grid
        ContentControl contentControl = new ContentControl();
        contentControl.Content = this.gestureDetectorList[i].GestureResultView;

        if (i % 2 == 0)
        {
            // gesture results for bodies: 0, 2, 4
            Grid.SetColumn(contentControl, 0);
            Grid.SetRow(contentControl, col0Row);
            ++col0Row;
        }
        else
        {
            // gesture results for bodies: 1, 3, 5
            Grid.SetColumn(contentControl, 1);
            Grid.SetRow(contentControl, col1Row);
            ++col1Row;
        }

        this.contentGrid.Children.Add(contentControl);
    }
}
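The MainWindow constructor subscribes Reader_BodyFrameArrived, which is where each GestureDetector gets bound to a tracked body. That handler is not shown in this excerpt; below is a minimal sketch, again following the DiscreteGestureBasics sample. The this.bodies field, KinectBodyView.UpdateBodyFrame, and the TrackingId/IsPaused properties on GestureDetector are assumed members of this project's classes, not confirmed by the code above.

// Hedged sketch (not the project's actual code): keep each gesture detector bound to the
// tracking id of the body with the same index, pausing detectors whose body is not tracked.
private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (this.bodies == null)
            {
                this.bodies = new Body[bodyFrame.BodyCount];
            }

            // refresh the Body array with the latest tracking data
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        // visualize the new body data (UpdateBodyFrame is an assumed KinectBodyView member)
        this.kinectBodyView.UpdateBodyFrame(this.bodies);

        // bodies may have been lost or acquired, so update the corresponding gesture detectors
        for (int i = 0; i < this.bodies.Length; ++i)
        {
            ulong trackingId = this.bodies[i].TrackingId;

            if (trackingId != this.gestureDetectorList[i].TrackingId)
            {
                this.gestureDetectorList[i].TrackingId = trackingId;

                // pause the detector when its body is not tracked, so no invalid gesture frames are processed
                this.gestureDetectorList[i].IsPaused = trackingId == 0;
            }
        }
    }
}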