public MainWindow() { // get Active Kinect Sensor this.sensor = KinectSensor.GetDefault(); // open the sensor this.sensor.Open(); // open the reader for the body frames this.bodyFrameReader = sensor.BodyFrameSource.OpenReader(); this.bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived; // initialize the gesture detection objects for our gestures this.gestureDetectorList = new List<GestureDetector>(); this.InitializeComponent(); // get screen width and height screenWidth = (int)SystemParameters.PrimaryScreenWidth; screenHeight = (int)SystemParameters.PrimaryScreenHeight; // set up timer, execute every 0.1s timer.Interval = new TimeSpan(0, 0, 0, 0, 100); timer.Tick += new EventHandler(Timer_Tick); timer.Start(); int maxBodies = this.sensor.BodyFrameSource.BodyCount; for (int i = 0; i < maxBodies; ++i) { GestureResultView result = new GestureResultView(i, false, false, 0.0f, -1.0f, false, false, false); GestureDetector detector = new GestureDetector(this.sensor, result); this.gestureDetectorList.Add(detector); } }
/// <summary> /// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader /// </summary> /// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param> /// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param> public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView) { if (kinectSensor == null) { throw new ArgumentNullException("kinectSensor"); } if (gestureResultView == null) { throw new ArgumentNullException("gestureResultView"); } this.GestureResultView = gestureResultView; // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor. this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0); this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost; // open the reader for the vgb frames this.vgbFrameReader = this.vgbFrameSource.OpenReader(); if (this.vgbFrameReader != null) { this.vgbFrameReader.IsPaused = true; this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived; } // load all available gestures from the gesture database using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase)) { this.vgbFrameSource.AddGestures(database.AvailableGestures); } }
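The constructors in this section leave the VGB reader paused until a body is matched to the source. A minimal sketch of the pass-through properties the body-frame handler normally uses to do that, modeled on the SDK's DiscreteGestureBasics sample (the property names are assumptions if a given GestureDetector differs):

public ulong TrackingId
{
    get { return this.vgbFrameSource.TrackingId; }
    set
    {
        if (this.vgbFrameSource.TrackingId != value)
        {
            // a new body was assigned to this detector; the VGB source follows it from now on
            this.vgbFrameSource.TrackingId = value;
        }
    }
}

public bool IsPaused
{
    get { return this.vgbFrameReader.IsPaused; }
    set
    {
        if (this.vgbFrameReader.IsPaused != value)
        {
            // pause gesture evaluation while no body is assigned
            this.vgbFrameReader.IsPaused = value;
        }
    }
}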
/// <summary> /// Handles gesture detection results arriving from the sensor for the associated body tracking Id /// </summary> /// <param name="sender">object sending the event</param> /// <param name="e">event arguments</param> private void Reader_GestureFrameArrived(object sender, VisualGestureBuilderFrameArrivedEventArgs e) { VisualGestureBuilderFrameReference frameReference = e.FrameReference; using (VisualGestureBuilderFrame frame = frameReference.AcquireFrame()) { if (frame != null) { // get the discrete gesture results which arrived with the latest frame IReadOnlyDictionary<Gesture, DiscreteGestureResult> discreteResults = frame.DiscreteGestureResults; if (discreteResults != null) { // several discrete gestures were loaded into this source, so check each one foreach (Gesture gesture in this.vgbFrameSource.Gestures) { if (gesture.GestureType == GestureType.Discrete) { DiscreteGestureResult result = null; discreteResults.TryGetValue(gesture, out result); if (result != null) { if (gesture.Name.Equals(this.ASLStandGestureName)) { GestureResultView.UpdateGestureResult(true, gesture.Name, result.Detected, result.Confidence); } else if (gesture.Name.Equals(this.ASLWalkGestureName)) { GestureResultView.UpdateGestureResult(true, gesture.Name, result.Detected, result.Confidence); } else if (gesture.Name.Equals(this.ASLTurnLeftGestureName)) { GestureResultView.UpdateGestureResult(true, gesture.Name, result.Detected, result.Confidence); } else if (gesture.Name.Equals(this.ASLTurnRightGestureName)) { GestureResultView.UpdateGestureResult(true, gesture.Name, result.Detected, result.Confidence); } else if (gesture.Name.Equals(this.ASLSitGestureName)) { GestureResultView.UpdateGestureResult(true, gesture.Name, result.Detected, result.Confidence); } else if (gesture.Name.Equals(this.ASLStopGestureName)) { GestureResultView.UpdateGestureResult(true, gesture.Name, result.Detected, result.Confidence); } } } } } } } }
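Every branch in the chain above forwards the same arguments, so the per-name checks add nothing. A compact equivalent of the inner loop, assuming the same UpdateGestureResult overload:

// only the ASL gestures were added to vgbFrameSource, so any result returned here
// belongs to one of them and can be reported directly
foreach (Gesture gesture in this.vgbFrameSource.Gestures)
{
    if (gesture.GestureType != GestureType.Discrete)
    {
        continue;
    }

    DiscreteGestureResult result = null;
    if (discreteResults.TryGetValue(gesture, out result) && result != null)
    {
        GestureResultView.UpdateGestureResult(true, gesture.Name, result.Detected, result.Confidence);
    }
}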
/// <summary> /// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader /// </summary> /// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param> /// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param> public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView) { if (kinectSensor == null) { throw new ArgumentNullException("kinectSensor"); } if (gestureResultView == null) { throw new ArgumentNullException("gestureResultView"); } try { sp.PortName = "COM3"; sp.BaudRate = 9600; sp.Open(); } catch { } this.GestureResultView = gestureResultView; // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor. this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0); this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost; // open the reader for the vgb frames this.vgbFrameReader = this.vgbFrameSource.OpenReader(); if (this.vgbFrameReader != null) { this.vgbFrameReader.IsPaused = true; this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived; } // load the 'Seated' gesture from the gesture database using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase)) { // we could load all available gestures in the database with a call to vgbFrameSource.AddGestures(database.AvailableGestures), // but for this program, we only want to track one discrete gesture from the database, so we'll load it by name /*foreach (Gesture gesture in database.AvailableGestures) * { * if (gesture.Name.Equals(this.leftledGestureName)) * { * this.vgbFrameSource.AddGesture(gesture); * } * if (gesture.Name.Equals(this.rightledGestureName)) * { * this.vgbFrameSource.AddGesture(gesture); * } * }*/ this.vgbFrameSource.AddGestures(database.AvailableGestures); } }
/// <summary> /// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader /// </summary> /// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param> /// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param> public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView) { if (kinectSensor == null) { throw new ArgumentNullException("kinectSensor"); } if (gestureResultView == null) { throw new ArgumentNullException("gestureResultView"); } this.GestureResultView = gestureResultView; ////////////////////////////////////////////////////////////////////////////////////////////////////////////// //this.GestureResultView.Play(); //this.GestureResultView.Test(); //this.GestureResultView.Pause(); //this.GestureResultView.Next(); //this.GestureResultView.Prev(); ////////////////////////////////////////////////////////////////////////////////////////////////////////////// // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor. this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0); this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost; // open the reader for the vgb frames this.vgbFrameReader = this.vgbFrameSource.OpenReader(); if (this.vgbFrameReader != null) { this.vgbFrameReader.IsPaused = true; this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived; } // load the gesture from the gesture database using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase)) { //this.vgbFrameSource.AddGestures(database.AvailableGestures); // we could load all available gestures in the database with a call to vgbFrameSource.AddGestures(database.AvailableGestures), // but for this program, we only want to track one discrete gesture from the database, so we'll load it by name foreach (Gesture gesture in database.AvailableGestures) { if (gesture.Name.Equals(this.SkipSongLeft)) { this.vgbFrameSource.AddGesture(gesture); } if (gesture.Name.Equals(this.SkipSongRight)) { this.vgbFrameSource.AddGesture(gesture); } } } }
/// <summary> /// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader /// </summary> /// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param> /// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param> public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView,int[] bodiesSelectStatus) { this.bodiesSelectStatus = bodiesSelectStatus; if (kinectSensor == null) { throw new ArgumentNullException("kinectSensor"); } if (gestureResultView == null) { throw new ArgumentNullException("gestureResultView"); } this.GestureResultView = gestureResultView; // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor. this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0); this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost; // open the reader for the vgb frames this.vgbFrameReader = this.vgbFrameSource.OpenReader(); if (this.vgbFrameReader != null) { this.vgbFrameReader.IsPaused = true; this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived; } // load the 'Seated' gesture from the gesture database using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.waveDatabase)) { // we could load all available gestures in the database with a call to vgbFrameSource.AddGestures(database.AvailableGestures), // but for this program, we only want to track one discrete gesture from the database, so we'll load it by name foreach (Gesture gesture in database.AvailableGestures) { if (gesture.Name.Equals(this.waveGestureName)) { this.vgbFrameSource.AddGesture(gesture); } if (gesture.Name.Equals(this.waveRightGestureName)) { this.vgbFrameSource.AddGesture(gesture); } if (gesture.Name.Equals(this.raiseRightHandGestureName)) { this.vgbFrameSource.AddGesture(gesture); } if (gesture.Name.Equals(this.raiseLeftHandGestureName)) { this.vgbFrameSource.AddGesture(gesture); } } } }
public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView) { //init LifeCycleEvents.. lce = new LifeCycleEvents("GESTURES", "FUSION", "gestures-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode) mmic = new MmiCommunication("localhost", 9876, "User1", "ASR"); //PORT TO FUSION - uncomment this line to work with fusion later //mmic = new MmiCommunication("localhost", 8000, "User1", "GESTURES"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName) mmic.Send(lce.NewContextRequest()); if (kinectSensor == null) { throw new ArgumentNullException("kinectSensor"); } if (gestureResultView == null) { throw new ArgumentNullException("gestureResultView"); } this.GestureResultView = gestureResultView; // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor. this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0); this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost; // open the reader for the vgb frames this.vgbFrameReader = this.vgbFrameSource.OpenReader(); if (this.vgbFrameReader != null) { this.vgbFrameReader.IsPaused = true; this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived; } // load the 'Seated' gesture from the gesture database using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase)) { // we could load all available gestures in the database with a call to vgbFrameSource.AddGestures(database.AvailableGestures), // but for this program, we only want to track one discrete gesture from the database, so we'll load it by name foreach (Gesture gesture in database.AvailableGestures) { if (gesture.Name.Equals(this.crouch_gesture) || gesture.Name.Equals(this.dab_gesture) || gesture.Name.Equals(this.hey_gesture) || gesture.Name.Equals(this.hold_gesture) || gesture.Name.Equals(this.reload_gesture)) { this.vgbFrameSource.AddGesture(gesture); } } } }
/// <summary> /// Initializes a new instance of the MainWindow class /// </summary> public MainWindow() { main = this; // only one sensor is currently supported this.kinectSensor = KinectSensor.GetDefault(); // set IsAvailableChanged event notifier this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged; // open the sensor this.kinectSensor.Open(); // set the status text this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText; // open the reader for the body frames this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader(); // set the BodyFramedArrived event notifier this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived; // initialize the BodyViewer object for displaying tracked bodies in the UI this.kinectBodyView = new KinectBodyView(this.kinectSensor); // initialize the gesture detection objects for our gestures this.gestureDetectorList = new List <GestureDetector>(); // initialize the MainWindow this.InitializeComponent(); // set our data context objects for display in UI this.DataContext = this; this.kinectBodyViewbox.DataContext = this.kinectBodyView; // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount; for (int i = 0; i < maxBodies; ++i) { GestureResultView result = new GestureResultView(i, false, false, 0.0f); GestureDetector detector = new GestureDetector(this.kinectSensor, result, main); this.gestureDetectorList.Add(detector); } }
// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView) { //init LifeCycleEvents.. lce = new LifeCycleEvents("GESTURES", "FUSION", "gestures-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode) mmic = new MmiCommunication("localhost", 9876, "User1", "ASR"); //PORT TO FUSION - uncomment this line to work with fusion later //mmic = new MmiCommunication("localhost", 8000, "User1", "GESTURES"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName) mmic.Send(lce.NewContextRequest()); count = 0; if (kinectSensor == null) { throw new ArgumentNullException("Kinect Sensor is null"); } if (gestureResultView == null) { throw new ArgumentNullException("Gesture Result View is null"); } GestureResultView = gestureResultView; // Create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor. vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0); vgbFrameSource.TrackingIdLost += Source_TrackingIdLost; // Open the reader for the vgb frames vgbFrameReader = vgbFrameSource.OpenReader(); if (vgbFrameReader != null) { vgbFrameReader.IsPaused = true; vgbFrameReader.FrameArrived += Reader_GestureFrameArrived; } // Load gestures from database using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(gestureDatabase)) { foreach (Gesture gesture in database.AvailableGestures) { if (gesture.Name.Equals(stop) || gesture.Name.Equals(back) || gesture.Name.Equals(skip) || gesture.Name.Equals(vdown) || gesture.Name.Equals(vup)) { vgbFrameSource.AddGesture(gesture); } } } }
/// <summary> /// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader /// </summary> /// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param> /// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param> public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView) { this.grv = gestureResultView; if (kinectSensor == null) { Console.WriteLine("no Kinect"); throw new ArgumentNullException("kinectSensor"); } if (gestureResultView == null) { throw new ArgumentNullException("gestureResultView"); } this.GestureResultView = gestureResultView; // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor. this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0); this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost; // open the reader for the vgb frames this.vgbFrameReader = this.vgbFrameSource.OpenReader(); if (this.vgbFrameReader != null) { this.vgbFrameReader.IsPaused = true; this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived; } // load the 'Seated' gesture from the gesture database using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(@"Database\basicnav.gbd")) { // we could load all available gestures in the database with a call to vgbFrameSource.AddGestures(database.AvailableGestures), // but for this program, we only want to track one discrete gesture from the database, so we'll load it by name // foreach (Gesture gesture in database.AvailableGestures) // { // if (gesture.Name.Equals(this.seatedGestureName)) // { //only load "basicnav.gbd" this.vgbFrameSource.AddGestures(database.AvailableGestures); // } // } } }
/// <summary> /// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader /// </summary> /// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param> /// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param> public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView) { if (kinectSensor == null) { throw new ArgumentNullException("kinectSensor"); } if (gestureResultView == null) { throw new ArgumentNullException("gestureResultView"); } this.GestureResultView = gestureResultView; // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor. this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0); this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost; // open the reader for the vgb frames this.vgbFrameReader = this.vgbFrameSource.OpenReader(); if (this.vgbFrameReader != null) { this.vgbFrameReader.IsPaused = true; this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived; } // load the 'Seated' gesture from the gesture database using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase)) { // we could load all available gestures in the database with a call to vgbFrameSource.AddGestures(database.AvailableGestures), // but for this program, we only want to track one discrete gesture from the database, so we'll load it by name foreach (Gesture gesture in database.AvailableGestures) { if (gesture.Name.Equals(this.seatedGestureName)) { this.vgbFrameSource.AddGesture(gesture); } } } }
/// <summary> /// Initializes a new instance of the MainWindow class /// </summary> public MainWindow() { // only one sensor is currently supported this.kinectSensor = KinectSensor.GetDefault(); // set IsAvailableChanged event notifier this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged; this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader(); this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived; FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra); this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null); // open the sensor this.kinectSensor.Open(); // set the status text this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText; // open the reader for the body frames this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader(); // set the BodyFramedArrived event notifier this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived; // initialize the BodyViewer object for displaying tracked bodies in the UI this.kinectBodyView = new KinectBodyView(this.kinectSensor); this.gestureResultView = new GestureResultView(false, false, 0.0f, ""); // initialize the gesture detection objects for our gestures this.gestureDetector = new GestureDetector(this.kinectSensor, this.gestureResultView); // initialize the MainWindow this.InitializeComponent(); // set our data context objects for display in UI this.DataContext = this; this.kinectBodyViewbox.DataContext = this.kinectBodyView; this.gestureResultGrid.DataContext = this.gestureResultView; }
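This window also subscribes to Reader_ColorFrameArrived and allocates a WriteableBitmap for the color stream. A minimal sketch of that handler in the style of the SDK's ColorBasics sample (the handler and bitmap names match the subscription above; the exact body is an assumption):

private void Reader_ColorFrameArrived(object sender, ColorFrameArrivedEventArgs e)
{
    using (ColorFrame colorFrame = e.FrameReference.AcquireFrame())
    {
        if (colorFrame == null)
        {
            return;
        }

        FrameDescription desc = colorFrame.FrameDescription;

        this.colorBitmap.Lock();

        // verify the sizes match, then write the converted BGRA data straight into the bitmap's back buffer
        if ((desc.Width == this.colorBitmap.PixelWidth) && (desc.Height == this.colorBitmap.PixelHeight))
        {
            colorFrame.CopyConvertedFrameDataToIntPtr(
                this.colorBitmap.BackBuffer,
                (uint)(desc.Width * desc.Height * 4),
                ColorImageFormat.Bgra);

            this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
        }

        this.colorBitmap.Unlock();
    }
}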
/// <summary> /// Initializes a new instance of the MainWindow class /// </summary> public MainWindow() { // only one sensor is currently supported this.kinectSensor = KinectSensor.GetDefault(); // set IsAvailableChanged event notifier this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged; // open the sensor this.kinectSensor.Open(); // set the status text this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText; // open the reader for the body frames this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader(); // set the BodyFramedArrived event notifier this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived; // initialize the MainWindow this.InitializeComponent(); // set our data context objects for display in UI //this.DataContext = this; // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI GestureResultView result = new GestureResultView(0, false, false, 0.0f); GestureDetector detector = new GestureDetector(this.kinectSensor, result); this.gestureDetector = detector; // split gesture results across the first two columns of the content grid ContentControl contentControl = new ContentControl(); contentControl.Content = this.gestureDetector.GestureResultView; Grid.SetColumn(contentControl, 0); Grid.SetRow(contentControl, 0); this.contentGrid.Children.Add(contentControl); }
/// <summary> /// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader /// </summary> /// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param> /// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param> public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView, MainWindow main) { this.GestureResultView = gestureResultView; this.main = main; if (kinectSensor == null) { throw new ArgumentNullException("kinectSensor"); } if (gestureResultView == null) { throw new ArgumentNullException("gestureResultView"); } //Init lifeCycleEvents lce = new LifeCycleEvents("ASR", "IM", "gestures-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode) mmic = new MmiCommunication("localhost", 9876, "User1", "ASR"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName) mmic.Send(lce.NewContextRequest()); // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor. this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0); //this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost; // open the reader for the vgb frames this.vgbFrameReader = this.vgbFrameSource.OpenReader(); if (this.vgbFrameReader != null) { this.vgbFrameReader.IsPaused = true; this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived; } // load the gestures from the gesture database using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase)) { // we could load all available gestures in the database with a call to vgbFrameSource.AddGestures(database.AvailableGestures), // but for this program, we only want to track one discrete gesture from the database, so we'll load it by name this.vgbFrameSource.AddGestures(database.AvailableGestures); } }
/// <summary> /// Handles gesture detection results arriving from the sensor for the associated body tracking Id /// </summary> /// <param name="sender">object sending the event</param> /// <param name="e">event arguments</param> private void Reader_GestureFrameArrived(object sender, VisualGestureBuilderFrameArrivedEventArgs e) { VisualGestureBuilderFrameReference frameReference = e.FrameReference; using (VisualGestureBuilderFrame frame = frameReference.AcquireFrame()) { bool isGestureDetected = false; bool iscrouched = false; bool isdabbing = false; bool isheying = false; bool isholding = false; bool isreloading = false; float level = 0; if (frame != null) { // get the discrete and continuous gesture results which arrived with the latest frame IReadOnlyDictionary<Gesture, DiscreteGestureResult> discreteResults = frame.DiscreteGestureResults; IReadOnlyDictionary<Gesture, ContinuousGestureResult> continuousResults = frame.ContinuousGestureResults; if (discreteResults != null) { } if (continuousResults != null) { foreach (Gesture gesture in vgbFrameSource.Gestures) { if (gesture.Name.Equals(this.crouch_gesture) || gesture.Name.Equals(this.dab_gesture) || gesture.Name.Equals(this.hey_gesture) || gesture.Name.Equals(this.hold_gesture) || gesture.Name.Equals(this.reload_gesture)) { ContinuousGestureResult result = null; continuousResults.TryGetValue(gesture, out result); if (result != null) { level = result.Progress; if (level >= 0.8) { hysterisis++; if (hysterisis != 20) { return; } hysterisis = 0; ((MainWindow)System.Windows.Application.Current.MainWindow).overlay_crouch.Opacity = 0.0; ((MainWindow)System.Windows.Application.Current.MainWindow).overlay_dab.Opacity = 0.0; ((MainWindow)System.Windows.Application.Current.MainWindow).overlay_hey.Opacity = 0.0; ((MainWindow)System.Windows.Application.Current.MainWindow).overlay_hold.Opacity = 0.0; ((MainWindow)System.Windows.Application.Current.MainWindow).overlay_reload.Opacity = 0.0; ((MainWindow)System.Windows.Application.Current.MainWindow).overlay_normal.Opacity = 0.0; if (gesture.Name.Equals(crouch_gesture)) { sre_GestureRecognized(level, "CROUCH"); Console.WriteLine(" CROUCH "); isGestureDetected = true; iscrouched = true; isdabbing = false; isheying = false; isholding = false; isreloading = false; // Never do this, use MVVM, only used this because there's no time left ((MainWindow)System.Windows.Application.Current.MainWindow).current_gesture.Text = "Crouch"; ((MainWindow)System.Windows.Application.Current.MainWindow).overlay_crouch.Opacity = 0.5; } else if (gesture.Name.Equals(dab_gesture)) { sre_GestureRecognized(level, "DAB"); Console.WriteLine(" DAB "); isGestureDetected = true; iscrouched = false; isdabbing = true; isheying = false; isholding = false; isreloading = false; ((MainWindow)System.Windows.Application.Current.MainWindow).current_gesture.Text = "Dab"; ((MainWindow)System.Windows.Application.Current.MainWindow).overlay_dab.Opacity = 0.5; } else if (gesture.Name.Equals(hey_gesture)) { sre_GestureRecognized(level, "HEY"); Console.WriteLine(" HEY "); isGestureDetected = true; iscrouched = false; isdabbing = false; isheying = true; isholding = false; isreloading = false; ((MainWindow)System.Windows.Application.Current.MainWindow).current_gesture.Text = "Hey"; ((MainWindow)System.Windows.Application.Current.MainWindow).overlay_hey.Opacity = 0.5; } else if (gesture.Name.Equals(hold_gesture)) { sre_GestureRecognized(level, "HOLD"); Console.WriteLine(" HOLD "); isGestureDetected = true; iscrouched = false; isdabbing = false; isheying = false; isholding = true; isreloading = false; ((MainWindow)System.Windows.Application.Current.MainWindow).current_gesture.Text = "Hold"; ((MainWindow)System.Windows.Application.Current.MainWindow).overlay_hold.Opacity = 0.5; } else if (gesture.Name.Equals(reload_gesture)) { sre_GestureRecognized(level, "RELOAD"); Console.WriteLine(" RELOAD "); isGestureDetected = true; iscrouched = false; isdabbing = false; isheying = false; isholding = false; isreloading = true; ((MainWindow)System.Windows.Application.Current.MainWindow).current_gesture.Text = "Reload"; ((MainWindow)System.Windows.Application.Current.MainWindow).overlay_reload.Opacity = 0.5; } } } } } } GestureResultView.UpdateGestureResult(true, isGestureDetected, iscrouched, isdabbing, isheying, isholding, isreloading, level); } } }
// Handle gesture detection results arriving from the sensor for the associated body tracking Id private void Reader_GestureFrameArrived(object sender, VisualGestureBuilderFrameArrivedEventArgs e) { VisualGestureBuilderFrameReference frameReference = e.FrameReference; using (var frame = vgbFrameReader.CalculateAndAcquireLatestFrame()) { bool anyGestureDetected = false; bool stopDetected = false; bool skipDetected = false; bool backDetected = false; bool vupDetected = false; bool vdownDetected = false; float progress = 0; if (frame != null) { // Get gestures results var discreteResults = frame.DiscreteGestureResults; var continuousResults = frame.ContinuousGestureResults; if (discreteResults != null) { foreach (Gesture gesture in vgbFrameSource.Gestures) { if (gesture.GestureType == GestureType.Discrete) { DiscreteGestureResult result = null; discreteResults.TryGetValue(gesture, out result); if (result != null) { Console.WriteLine("Discrete Gesture"); anyGestureDetected = false; } } } } if (continuousResults != null) { foreach (Gesture gesture in vgbFrameSource.Gestures) { if (gesture.Name.Equals(stop) || gesture.Name.Equals(back) || gesture.Name.Equals(skip) || gesture.Name.Equals(vdown) || gesture.Name.Equals(vup)) { ContinuousGestureResult result = null; continuousResults.TryGetValue(gesture, out result); if (result != null) { progress = result.Progress; if (progress >= 1) { count++; if (count != 15) { return; } count = 0; if (gesture.Name.Equals(stop)) { sendMessage("PAUSE", progress); anyGestureDetected = true; stopDetected = true; skipDetected = false; backDetected = false; vupDetected = false; vdownDetected = false; } else if (gesture.Name.Equals(skip)) { sendMessage("BACK", progress); anyGestureDetected = true; stopDetected = false; skipDetected = true; backDetected = false; vupDetected = false; vdownDetected = false; } else if (gesture.Name.Equals(back)) { sendMessage("SKIP", progress); anyGestureDetected = true; stopDetected = false; skipDetected = false; backDetected = true; vupDetected = false; vdownDetected = false; } else if (gesture.Name.Equals(vup)) { sendMessage("VUP", progress); anyGestureDetected = true; stopDetected = false; skipDetected = false; backDetected = false; vupDetected = true; vdownDetected = false; } else if (gesture.Name.Equals(vdown)) { sendMessage("VDOWN", progress); anyGestureDetected = true; stopDetected = false; skipDetected = false; backDetected = false; vupDetected = false; vdownDetected = true; } } } } } } GestureResultView.UpdateGestureResult(true, anyGestureDetected, stopDetected, skipDetected, backDetected, vupDetected, vdownDetected, progress); } } }
/// <summary> /// Initializes a new instance of the MainWindow class /// </summary> public MainWindow() { // only one sensor is currently supported this.kinectSensor = KinectSensor.GetDefault(); // set IsAvailableChanged event notifier this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged; // open the sensor this.kinectSensor.Open(); // set the status text this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText; // open the reader for the body frames this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader(); // set the BodyFramedArrived event notifier this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived; // initialize the BodyViewer object for displaying tracked bodies in the UI this.kinectBodyView = new KinectBodyView(this.kinectSensor); // initialize the gesture detection objects for our gestures this.gestureDetectorList = new List<GestureDetector>(); // initialize the MainWindow this.InitializeComponent(); // set our data context objects for display in UI this.DataContext = this; this.kinectBodyViewbox.DataContext = this.kinectBodyView; // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI int col0Row = 0; int col1Row = 0; int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount; for (int i = 0; i < maxBodies; ++i) { GestureResultView result = new GestureResultView(i, false, false, 0.0f); GestureDetector detector = new GestureDetector(this.kinectSensor, result); this.gestureDetectorList.Add(detector); // split gesture results across the first two columns of the content grid ContentControl contentControl = new ContentControl(); contentControl.Content = this.gestureDetectorList[i].GestureResultView; if (i % 2 == 0) { // Gesture results for bodies: 0, 2, 4 Grid.SetColumn(contentControl, 0); Grid.SetRow(contentControl, col0Row); ++col0Row; } else { // Gesture results for bodies: 1, 3, 5 Grid.SetColumn(contentControl, 1); Grid.SetRow(contentControl, col1Row); ++col1Row; } this.contentGrid.Children.Add(contentControl); } }
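These constructors subscribe to Reader_BodyFrameArrived but leave the handler out. A minimal sketch of it, closely following the SDK's DiscreteGestureBasics sample: it assumes a Body[] field named bodies, and that each GestureDetector exposes the TrackingId/IsPaused pass-through properties sketched earlier.

private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (this.bodies == null)
            {
                this.bodies = new Body[bodyFrame.BodyCount];
            }

            // refresh the Body array with the latest tracking data
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        // update the skeleton display
        this.kinectBodyView.UpdateBodyFrame(this.bodies);

        // hand each body's tracking id to its detector; pause detectors that have no body
        for (int i = 0; i < this.bodies.Length; ++i)
        {
            ulong trackingId = this.bodies[i].TrackingId;

            if (trackingId != this.gestureDetectorList[i].TrackingId)
            {
                this.gestureDetectorList[i].TrackingId = trackingId;
                this.gestureDetectorList[i].IsPaused = trackingId == 0;
            }
        }
    }
}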
/// <summary> /// Initializes a new instance of the MainWindow class /// </summary> public MainWindow() { // only one sensor is currently supported this.kinectSensor = KinectSensor.GetDefault(); // set IsAvailableChanged event notifier this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged; // open the sensor this.kinectSensor.Open(); // set the status text this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText; // open the reader for the body frames this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader(); // set the BodyFrameArrived event notifier this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived; // initialize the BodyViewer object for displaying tracked bodies in the UI this.kinectBodyView = new KinectBodyView(this.kinectSensor); // initialize the gesture detection objects for our gestures this.gestureDetectorList = new List<GestureDetector>(); // initialize the MainWindow this.InitializeComponent(); try { sp.PortName = "COM6"; sp.BaudRate = 9600; sp.Open(); } catch (Exception) { MessageBox.Show("Please give a valid port number or check your connection"); } speaker.SelectVoiceByHints(System.Speech.Synthesis.VoiceGender.Female); // set our data context objects for display in UI this.DataContext = this; this.kinectBodyViewbox.DataContext = this.kinectBodyView; // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI // int col0Row = 0; // int col1Row = 0; int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount; //int maxBodies = 1; for (int i = 0; i < maxBodies; ++i) { GestureResultView result = new GestureResultView(i, false, false, 0.0f); GestureDetector detector = new GestureDetector(this.kinectSensor, result); this.gestureDetectorList.Add(detector); // split gesture results across the first two columns of the content grid ContentControl contentControl = new ContentControl(); contentControl.Content = this.gestureDetectorList[i].GestureResultView; } // set up the audio stream and speech recognizer once, outside the per-body loop if (this.kinectSensor != null) { // the sensor is already open, so grab the audio stream IReadOnlyList<AudioBeam> audioBeamList = this.kinectSensor.AudioSource.AudioBeams; System.IO.Stream audioStream = audioBeamList[0].OpenInputStream(); // create the convert stream this.convertStream = new KinectAudioStream(audioStream); } else { // on failure, set the status text return; } RecognizerInfo ri = TryGetKinectRecognizer(); if (null != ri) { this.speechEngine = new SpeechRecognitionEngine(ri.Id); // Create a grammar from grammar definition XML file. using (var memoryStream = new MemoryStream(Encoding.ASCII.GetBytes(Properties.Resources.SpeechGrammar))) { var g = new Grammar(memoryStream); this.speechEngine.LoadGrammar(g); } this.speechEngine.SpeechRecognized += this.SpeechRecognized; this.speechEngine.SpeechRecognitionRejected += this.SpeechRejected; // let the convertStream know speech is going active this.convertStream.SpeechActive = true; // For long recognition sessions (a few hours or more), it may be beneficial to turn off adaptation of the acoustic model. // This will prevent recognition accuracy from degrading over time. ////speechEngine.UpdateRecognizerSetting("AdaptationOn", 0); this.speechEngine.SetInputToAudioStream( this.convertStream, new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null)); this.speechEngine.RecognizeAsync(RecognizeMode.Multiple); } else { //this.statusBarText.Text = Properties.Resources.NoSpeechRecognizer; } ContentControl contentControl2 = new ContentControl(); contentControl2.Content = this.gestureDetectorList[0].GestureResultView; this.contentGrid.Children.Add(contentControl2); }
/// <summary> /// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader /// </summary> /// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param> /// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param> public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView) { if (kinectSensor == null) { throw new ArgumentNullException("kinectSensor"); } if (gestureResultView == null) { throw new ArgumentNullException("gestureResultView"); } this.GestureResultView = gestureResultView; // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor. this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0); this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost; // open the reader for the vgb frames this.vgbFrameReader = this.vgbFrameSource.OpenReader(); if (this.vgbFrameReader != null) { this.vgbFrameReader.IsPaused = true; this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived; } // load the 'Seated' gesture from the gesture database /* * using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase)) * { * // we could load all available gestures in the database with a call to vgbFrameSource.AddGestures(database.AvailableGestures), * // but for this program, we only want to track one discrete gesture from the database, so we'll load it by name * foreach (Gesture gesture in database.AvailableGestures) * { * if (gesture.Name.Equals(this.seatedGestureName)) * { * this.vgbFrameSource.AddGesture(gesture); * } * } * } */ using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase)) { Console.WriteLine("read success"); // we could load all available gestures in the database with a call to vgbFrameSource.AddGestures(database.AvailableGestures), // but for this program, we only want to track one discrete gesture from the database, so we'll load it by name //this.vgbFrameSource.AddGestures(database.AvailableGestures); Console.WriteLine(database.AvailableGesturesCount); foreach (Gesture gesture in database.AvailableGestures) { Console.WriteLine(gesture.Name); if (gesture.Name.Equals(this.waveGestureName)) { Console.WriteLine("wave"); this.vgbFrameSource.AddGesture(gesture); } if (gesture.Name.Equals(this.paperGestureName)) { Console.WriteLine("paper"); this.vgbFrameSource.AddGesture(gesture); } } } }
/// <summary> /// Initializes a new instance of the MainWindow class /// </summary> public MainWindow() { // only one sensor is currently supported this.kinectSensor = KinectSensor.GetDefault(); // set IsAvailableChanged event notifier this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged; // open the sensor this.kinectSensor.Open(); // set the status text this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText; // open the reader for the body frames this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader(); // set the BodyFramedArrived event notifier this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived; // initialize the BodyViewer object for displaying tracked bodies in the UI this.kinectBodyView = new KinectBodyView(this.kinectSensor); // initialize the MainWindow this.InitializeComponent(); // set our data context objects for display in UI this.DataContext = this; this.kinectBodyViewbox.DataContext = this.kinectBodyView; // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI gestureResultView = new GestureResultView(false, false, 0.0f, "null"); gestureDetector = new GestureDetector(this.kinectSensor, gestureResultView); ContentControl contentControl = new ContentControl(); contentControl.Content = this.gestureDetector.GestureResultView; Grid.SetColumn(contentControl, 0); Grid.SetRow(contentControl, 1); this.contentGrid.Children.Add(contentControl); MoveTo(0, 0); DispatcherTimer timer = new DispatcherTimer(); timer.Interval = TimeSpan.FromSeconds(0.1); timer.Tick += timer_Tick; timer.Start(); this.Abcsissa = abcsissa; this.Ordinate = ordinate; if (serialAttached == true) { this.serialport = new SerialPort(); serialport.PortName = "COM3"; serialport.Open(); serialport.BaudRate = 57600; } }//main window
// Handle the TrackingIdLost event for the VisualGestureBuilderSource object private void Source_TrackingIdLost(object sender, TrackingIdLostEventArgs e) { // Update the GestureResultView object to show the 'Not Tracked' image in the UI GestureResultView.UpdateGestureResult(false, false, false, false, false, false, false, 0.0f); }
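When a detector is torn down (for example from a window-closing handler), the VGB reader and source should be unhooked and disposed. A sketch of the usual IDisposable cleanup, following the SDK sample and the field names used in the constructors above:

public void Dispose()
{
    this.Dispose(true);
    GC.SuppressFinalize(this);
}

protected virtual void Dispose(bool disposing)
{
    if (disposing)
    {
        if (this.vgbFrameReader != null)
        {
            // stop receiving gesture frames and release the reader
            this.vgbFrameReader.FrameArrived -= this.Reader_GestureFrameArrived;
            this.vgbFrameReader.Dispose();
            this.vgbFrameReader = null;
        }

        if (this.vgbFrameSource != null)
        {
            // unhook the tracking-id-lost notification and release the source
            this.vgbFrameSource.TrackingIdLost -= this.Source_TrackingIdLost;
            this.vgbFrameSource.Dispose();
            this.vgbFrameSource = null;
        }
    }
}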
/// <summary> /// Initializes a new instance of the MainWindow class /// </summary> public MainWindow() { // only one sensor is currently supported this.kinectSensor = KinectSensor.GetDefault(); // set IsAvailableChanged event notifier this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged; // open the sensor this.kinectSensor.Open(); this._reader = kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth); this._reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived; // set the status text this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText; // open the reader for the body frames this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader(); // set the BodyFramedArrived event notifier this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived; // initialize the BodyViewer object for displaying tracked bodies in the UI this.kinectBodyView = new KinectBodyView(this.kinectSensor); // initialize the gesture detection objects for our gestures this.gestureDetectorList = new List<GestureDetector>(); // initialize the MainWindow this.InitializeComponent(); // set our data context objects for display in UI this.DataContext = this; this.kinectBodyViewbox.DataContext = this.kinectBodyView; // connect to htk server via tcpClient //this.clientInterface = ClientInterface.getClientInstance(); //clientInterface.connect(); //Console.WriteLine("connect to the client interface \n " + clientInterface.GetHashCode() + "\n"); //clientInterface.disconnect(); // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI //int col0Row = 0, col1Row = 0; this.colorFrameWriter = new ColorFrameWriter(); this.depthFrameWriter = new DepthFrameWriter(); this.jointDataWriter = new JointDataWriter(); this.totalCapturedFrames_joints = 0; this.totalCapturedFrames_color = 0; this.totalCapturedFrames_depth = 0; session_number = 1; int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount; for (int i = 0; i < maxBodies; ++i) { GestureResultView result = new GestureResultView(i, false, false, 0.0f); GestureDetector detector = new GestureDetector(this.kinectSensor, result); this.gestureDetectorList.Add(detector); // split gesture results across the first two columns of the content grid ContentControl contentControl = new ContentControl(); contentControl.Content = this.gestureDetectorList[i].GestureResultView; /* if (i % 2 == 0) { // Gesture results for bodies: 0, 2, 4 Grid.SetColumn(contentControl, 0); Grid.SetRow(contentControl, col0Row); ++col0Row; } else { // Gesture results for bodies: 1, 3, 5 Grid.SetColumn(contentControl, 1); Grid.SetRow(contentControl, col1Row); ++col1Row; } this.contentGrid.Children.Add(contentControl);*/ } prevDeleteButton.Click += deletePreviousSample; currentPhraseName.Text = phrase_list[current_phrase_index]; String current_phrase = phrase_list[current_phrase_index]; char[] delims = { '_' }; String[] words = current_phrase.Split(delims); StringBuilder builder = new StringBuilder(); foreach (string s in words) { builder.Append(s.ToLower()).Append(" "); } String cleanedPhrase = builder.ToString().TrimEnd(new char[] { ' ' }); cleanedPhrase += ".png"; //Console.WriteLine("!!!!!!!!!!!!!@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@" + cleanedPhrase); BitmapImage image = new BitmapImage(); image.BeginInit(); image.UriSource = new Uri(System.IO.Path.Combine( 
@"C:\Users\aslr\Documents\GitHub\SignLanguageRecognition\phrase-sampler-2.0\phrase_images", cleanedPhrase)); image.EndInit(); phraseImage.Source = image; phrase_name = phrase_list[current_phrase_index]; /*clientInterface.sendData("new_phrase"); clientInterface.sendData(phrase_name);*/ //String mainDir = System.IO.Path.Combine(@"C:\Users\aslr\Documents\aslr-data", phrase_name); //String colorDir = System.IO.Path.Combine(mainDir, "color"); //String depthDir = System.IO.Path.Combine(mainDir, "depth"); //Console.WriteLine("&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&& " + kinect.DepthStream.FrameWidth);\ mainDir = System.IO.Path.Combine(dataWritePath, phrase_name); //String colorDir = System.IO.Path.Combine(mainDir, "color"); //String depthDir = System.IO.Path.Combine(mainDir, "depth"); System.IO.Directory.CreateDirectory(mainDir); //System.IO.Directory.CreateDirectory(colorDir); //System.IO.Directory.CreateDirectory(depthDir); //System.IO.Directory.CreateDirectory(mainDir); //System.IO.Directory.CreateDirectory(colorDir); //System.IO.Directory.CreateDirectory(depthDir); colorFrameWriter.setCurrentPhrase(phrase_name); depthFrameWriter.setCurrentPhrase(phrase_name); jointDataWriter.setCurrentPhrase(phrase_name); }
/// <summary> /// Initializes a new instance of the MainWindow class /// </summary> public MainWindow() { // only one sensor is currently supported this.kinectSensor = KinectSensor.GetDefault(); // set IsAvailableChanged event notifier this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged; // open the sensor this.kinectSensor.Open(); // set the status text this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText; // open the reader for the body frames this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader(); // set the BodyFramedArrived event notifier this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived; // initialize the BodyViewer object for displaying tracked bodies in the UI this.kinectBodyView = new KinectBodyView(this.kinectSensor); // initialize the gesture detection objects for our gestures this.gestureDetectorList = new List<GestureDetector>(); // initialize the MainWindow this.InitializeComponent(); // set our data context objects for display in UI this.DataContext = this; this.kinectBodyViewbox.DataContext = this.kinectBodyView; // connect to htk server via tcpClient this.clientInterface = ClientInterface.getClientInstance(); clientInterface.connect(); Console.WriteLine("connect to the client interface \n " + clientInterface.GetHashCode() + "\n"); //clientInterface.disconnect(); // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI //int col0Row = 0, col1Row = 0; int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount; for (int i = 0; i < maxBodies; ++i) { GestureResultView result = new GestureResultView(i, false, false, 0.0f); GestureDetector detector = new GestureDetector(this.kinectSensor, result); this.gestureDetectorList.Add(detector); // split gesture results across the first two columns of the content grid ContentControl contentControl = new ContentControl(); contentControl.Content = this.gestureDetectorList[i].GestureResultView; /* if (i % 2 == 0) { // Gesture results for bodies: 0, 2, 4 Grid.SetColumn(contentControl, 0); Grid.SetRow(contentControl, col0Row); ++col0Row; } else { // Gesture results for bodies: 1, 3, 5 Grid.SetColumn(contentControl, 1); Grid.SetRow(contentControl, col1Row); ++col1Row; } this.contentGrid.Children.Add(contentControl);*/ } prevDeleteButton.Click += deletePreviousSample; currentPhraseName.Text = (current_phrase_index+1) + " " + phrase_list[current_phrase_index]; phrase_name = phrase_list[current_phrase_index]; clientInterface.sendData("new_phrase"); clientInterface.sendData(phrase_name); }
public MainWindow() { main = this; InitializeComponent(); options.Add(new TodoItem() { Title = "Pesquisar Voo para Paris", Color = "#ff00BCF2" }); options.Add(new TodoItem() { Title = "Pesquisar Voo para Roma" }); options.Add(new TodoItem() { Title = "Pesquisar Voo para Londres" }); options.Add(new TodoItem() { Title = "Pesquisar Hotel para Paris" }); options.Add(new TodoItem() { Title = "Pesquisar Hotel para Roma" }); options.Add(new TodoItem() { Title = "Pesquisar Hotel para Londres" }); lbTodoList.ItemsSource = options; // only one sensor is currently supported this.kinectSensor = KinectSensor.GetDefault(); // set IsAvailableChanged event notifier this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged; // open the sensor this.kinectSensor.Open(); // set the status text this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText; // open the reader for the body frames this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader(); // set the BodyFramedArrived event notifier this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived; // initialize the BodyViewer object for displaying tracked bodies in the UI this.kinectBodyView = new KinectBodyView(this.kinectSensor); // initialize the gesture detection objects for our gestures this.gestureDetectorList = new List <GestureDetector>(); // initialize the MainWindow this.InitializeComponent(); // set our data context objects for display in UI this.DataContext = this; this.kinectBodyViewbox.DataContext = this.kinectBodyView; // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI //int col0Row = 0; //int col1Row = 0; //int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount; //for (int i = 0; i < maxBodies; ++i) //{ GestureResultView result = new GestureResultView(0, false, false, 0.0f); GestureDetector detector = new GestureDetector(this.kinectSensor, result, this.main, circle, this.Dispatcher); this.gestureDetectorList.Add(detector); // split gesture results across the first two columns of the content grid ContentControl contentControl = new ContentControl(); contentControl.Content = this.gestureDetectorList[0].GestureResultView; //if (i % 2 == 0) //{ // Gesture results for bodies: 0, 2, 4 Grid.SetColumn(contentControl, 0); Grid.SetRow(contentControl, 2); //++col0Row; //} //else //{ // Gesture results for bodies: 1, 3, 5 // Grid.SetColumn(contentControl, 1); //Grid.SetRow(contentControl, col1Row); // ++col1Row; //} this.contentGrid.Children.Add(contentControl); //} //init LifeCycleEvents.. lce = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode) //mmic = new MmiCommunication("localhost",9876,"User1", "ASR"); //PORT TO FUSION - uncomment this line to work with fusion later mmic = new MmiCommunication("localhost", 8000, "User1", "ASR"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName) mmic.Send(lce.NewContextRequest()); }
/// <summary>
/// Initializes a new instance of the MainWindow class
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // open a multi-source reader for color and depth frames
    this._reader = kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth);
    this._reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // connect to htk server via tcpClient
    //this.clientInterface = ClientInterface.getClientInstance();
    //clientInterface.connect();
    //Console.WriteLine("connect to the client interface \n " + clientInterface.GetHashCode() + "\n");
    //clientInterface.disconnect();

    // initialize the writers that persist color, depth, and joint data for each recorded phrase
    this.colorFrameWriter = new ColorFrameWriter();
    this.depthFrameWriter = new DepthFrameWriter();
    this.jointDataWriter = new JointDataWriter();
    this.totalCapturedFrames_joints = 0;
    this.totalCapturedFrames_color = 0;
    this.totalCapturedFrames_depth = 0;
    this.framesCapturedInPhrase = 0;

    // restore per-phrase sample indices from disk if a previous session saved them
    this.phrase_indices = new int[phrase_list.Length];
    if (File.Exists(@".\indices_state.txt"))
    {
        using (System.IO.StreamReader file = new System.IO.StreamReader(@".\indices_state.txt"))
        {
            String indices_line = file.ReadLine();
            String[] indices_states = indices_line.Split(' ');
            for (int i = 0; i < phrase_indices.Length; i++)
            {
                this.phrase_indices[i] = (i >= indices_states.Length) ? 0 : Int32.Parse(indices_states[i]);
            }
        }
    }
    else
    {
        for (int i = 0; i < phrase_indices.Length; i++)
        {
            this.phrase_indices[i] = 0;
        }
    }

    // present the phrases in a random order
    random_phrase_indices = new int[phrase_list.Length];
    random_phrase_inx_counter = 0;
    for (int i = 0; i < phrase_list.Length; i++)
    {
        random_phrase_indices[i] = i;
    }

    new Random().Shuffle(random_phrase_indices);
    session_number = 1;

    // create a gesture detector for each body (6 bodies => 6 detectors);
    // the per-body content controls from the original sample are created but no longer added to the UI
    //int col0Row = 0, col1Row = 0;
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        this.gestureDetectorList.Add(detector);

        ContentControl contentControl = new ContentControl();
        contentControl.Content = this.gestureDetectorList[i].GestureResultView;

        /*
        if (i % 2 == 0)
        {
            // Gesture results for bodies: 0, 2, 4
            Grid.SetColumn(contentControl, 0);
            Grid.SetRow(contentControl, col0Row); ++col0Row;
        }
        else
        {
            // Gesture results for bodies: 1, 3, 5
            Grid.SetColumn(contentControl, 1);
            Grid.SetRow(contentControl, col1Row); ++col1Row;
        }

        this.contentGrid.Children.Add(contentControl);
        */
    }

    prevDeleteButton.Click += deletePreviousSample;
    currentPhraseName.Text = phrase_list[current_phrase_index];

    // convert the underscore-delimited phrase identifier into the matching lowercase image file name
    String current_phrase = phrase_list[current_phrase_index];
    char[] delims = { '_' };
    String[] words = current_phrase.Split(delims);
    StringBuilder builder = new StringBuilder();
    foreach (string s in words)
    {
        builder.Append(s.ToLower()).Append(" ");
    }

    String cleanedPhrase = builder.ToString().TrimEnd(new char[] { ' ' });
    cleanedPhrase += ".png";

    BitmapImage image = new BitmapImage();
    image.BeginInit();
    image.UriSource = new Uri(System.IO.Path.Combine(@"C:\Users\aslr\Documents\GitHub\SignLanguageRecognition\phrase-sampler-3.0\phrase_images", cleanedPhrase));
    image.EndInit();
    phraseImage.Source = image;

    phrase_name = phrase_list[current_phrase_index];

    /*clientInterface.sendData("new_phrase");
    clientInterface.sendData(phrase_name);*/

    // create the output directory for the current phrase
    //String mainDir = System.IO.Path.Combine(@"C:\Users\aslr\Documents\aslr-data", phrase_name);
    mainDir = System.IO.Path.Combine(dataWritePath, phrase_name);
    System.IO.Directory.CreateDirectory(mainDir);
    //String colorDir = System.IO.Path.Combine(mainDir, "color");
    //String depthDir = System.IO.Path.Combine(mainDir, "depth");
    //System.IO.Directory.CreateDirectory(colorDir);
    //System.IO.Directory.CreateDirectory(depthDir);

    colorFrameWriter.setCurrentPhrase(phrase_name);
    depthFrameWriter.setCurrentPhrase(phrase_name);
    jointDataWriter.setCurrentPhrase(phrase_name);
}
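The constructor above calls new Random().Shuffle(random_phrase_indices), but Random has no such instance method in the .NET Framework versions this Kinect code targets, so the project presumably supplies its own extension. Below is a minimal sketch of such an extension (Fisher-Yates, in place); the class name and location are assumptions, not taken from the source.

using System;

// Hypothetical helper; the real project may name or place this differently.
public static class RandomExtensions
{
    /// <summary>
    /// Shuffles the array in place using the Fisher-Yates algorithm.
    /// </summary>
    public static void Shuffle<T>(this Random random, T[] values)
    {
        for (int i = values.Length - 1; i > 0; i--)
        {
            int j = random.Next(i + 1);   // pick 0 <= j <= i
            T tmp = values[i];
            values[i] = values[j];
            values[j] = tmp;
        }
    }
}

With this extension in scope, the call in the constructor compiles as written and leaves random_phrase_indices holding a uniform random permutation of the phrase indices.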
/// <summary>
/// Initializes a new instance of the MainWindow class
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // open the sensor
    this.kinectSensor.Open();

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;

    // create a gesture detector for each body (6 bodies => 6 detectors)
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        detector.GestureDetected += this.GestureDetected;
        this.gestureDetectorList.Add(detector);
    }
}
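This variant subscribes a GestureDetected handler, an event the stock GestureDetector does not expose, so the detector has evidently been extended. The sketch below shows one plausible shape for that plumbing; the GestureEventArgs type, its properties, and the handler body are assumptions rather than code taken from the source.

using System;

// Hypothetical event payload; the real project may define this differently.
public class GestureEventArgs : EventArgs
{
    public string GestureName { get; set; }
    public float Confidence { get; set; }
}

// Added to GestureDetector: raised when a discrete gesture result is detected.
public event EventHandler<GestureEventArgs> GestureDetected;

private void OnGestureDetected(string name, float confidence)
{
    EventHandler<GestureEventArgs> handler = this.GestureDetected;
    if (handler != null)
    {
        handler(this, new GestureEventArgs { GestureName = name, Confidence = confidence });
    }
}

// Added to MainWindow: the handler wired up in the constructor above.
private void GestureDetected(object sender, GestureEventArgs e)
{
    // react to the recognized gesture, e.g. update a status string or drive application logic
    this.StatusText = string.Format("Detected {0} ({1:P0})", e.GestureName, e.Confidence);
}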
/// <summary>
/// Initializes a new instance of the MainWindow class
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI
    int col0Row = 0;
    int col1Row = 0;
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        this.gestureDetectorList.Add(detector);

        // split gesture results across the first two columns of the content grid
        ContentControl contentControl = new ContentControl();
        contentControl.Content = this.gestureDetectorList[i].GestureResultView;

        if (i % 2 == 0)
        {
            // Gesture results for bodies: 0, 2, 4
            Grid.SetColumn(contentControl, 0);
            Grid.SetRow(contentControl, col0Row);
            ++col0Row;
        }
        else
        {
            // Gesture results for bodies: 1, 3, 5
            Grid.SetColumn(contentControl, 1);
            Grid.SetRow(contentControl, col1Row);
            ++col1Row;
        }

        this.contentGrid.Children.Add(contentControl);
    }
}
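Several of these constructors register Sensor_IsAvailableChanged without showing the handler. In the stock Kinect samples it simply refreshes the status text whenever the sensor becomes available or unavailable; the following is a minimal sketch along those lines, assuming the same Properties.Resources strings and StatusText property used above.

using Microsoft.Kinect;

/// <summary>
/// Handles the event raised when the sensor becomes available or unavailable
/// (for example when it is unplugged or claimed by another process).
/// </summary>
private void Sensor_IsAvailableChanged(object sender, IsAvailableChangedEventArgs e)
{
    // refresh the UI status string to reflect the current sensor availability
    this.StatusText = this.kinectSensor.IsAvailable
        ? Properties.Resources.RunningStatusText
        : Properties.Resources.NoSensorStatusText;
}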
/// <summary>
/// Initializes a new instance of the MainWindow class
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // create a gesture detector and a content control to display results in the UI
    // (note: this variant caps maxBodies at 1, so only a single body is tracked)
    int col0Row = 0;
    int col1Row = 0;
    int maxBodies = 1;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        detector.setup();
        this.gestureDetectorList.Add(detector);

        // split gesture results across the first two columns of the content grid
        ContentControl contentControl = new ContentControl();
        contentControl.Content = this.gestureDetectorList[i].GestureResultView;

        if (i % 2 == 0)
        {
            // Gesture results for bodies: 0, 2, 4
            Grid.SetColumn(contentControl, 0);
            Grid.SetRow(contentControl, col0Row);
            ++col0Row;
        }
        else
        {
            // Gesture results for bodies: 1, 3, 5
            Grid.SetColumn(contentControl, 1);
            Grid.SetRow(contentControl, col1Row);
            ++col1Row;
        }

        this.contentGrid.Children.Add(contentControl);
    }
}
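Every variant registers Reader_BodyFrameArrived but none show it. In Microsoft's DiscreteGestureBasics sample that handler copies the latest body data and keeps each GestureDetector paired with a tracked body; the condensed sketch below follows that pattern. It assumes the detector exposes TrackingId and IsPaused properties, as the sample's detector does, and that MainWindow keeps a reusable Body[] buffer.

using Microsoft.Kinect;

private Body[] bodies = null;   // reusable buffer for the latest body data

private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (this.bodies == null)
            {
                this.bodies = new Body[bodyFrame.BodyCount];
            }

            // refresh the body array with the latest tracking data
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        // keep each gesture detector bound to "its" body; pause detectors whose body is not tracked
        for (int i = 0; i < this.bodies.Length && i < this.gestureDetectorList.Count; ++i)
        {
            ulong trackingId = this.bodies[i].TrackingId;

            if (trackingId != this.gestureDetectorList[i].TrackingId)
            {
                this.gestureDetectorList[i].TrackingId = trackingId;
                this.gestureDetectorList[i].IsPaused = (trackingId == 0);
            }
        }
    }
}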
/// <summary>
/// Initializes a new instance of the MainWindow class
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI
    int col0Row = 0;
    int col1Row = 0;
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);

        // foreach (var database in GestureHelper.gestures)
        // {
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        this.gestureDetectorList.Add(detector);
        // }

        // split gesture results across the first two columns of the content grid
        ContentControl contentControl = new ContentControl();
        contentControl.Content = this.gestureDetectorList[i].GestureResultView;

        if (i % 2 == 0)
        {
            // Gesture results for bodies: 0, 2, 4
            Grid.SetColumn(contentControl, 0);
            Grid.SetRow(contentControl, col0Row);
            ++col0Row;
        }
        else
        {
            // Gesture results for bodies: 1, 3, 5
            Grid.SetColumn(contentControl, 1);
            Grid.SetRow(contentControl, col1Row);
            ++col1Row;
        }

        this.contentGrid.Children.Add(contentControl);
    }

    // Face detection: request the face features to be read for the tracked body
    _faceSource = new FaceFrameSource(kinectSensor, 0,
        FaceFrameFeatures.BoundingBoxInColorSpace |
        FaceFrameFeatures.Happy |
        FaceFrameFeatures.LeftEyeClosed |
        FaceFrameFeatures.MouthOpen |
        FaceFrameFeatures.FaceEngagement |
        FaceFrameFeatures.LookingAway |
        FaceFrameFeatures.PointsInColorSpace |
        FaceFrameFeatures.RightEyeClosed);
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += FaceReader_FrameArrived;

    // open the serial port configured in the application settings
    this.sp = new SerialPort(Properties.Settings.Default.ComPort, Properties.Settings.Default.Baudrate, Parity.None, 8, StopBits.One);
}
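This last variant wires up FaceReader_FrameArrived and opens a serial port, but shows neither handler. Below is a minimal sketch of the face handler using the Microsoft.Kinect.Face types named in the constructor; what is done with the detection results (for example, forwarding a command over this.sp) is an assumption left as a comment. Note that face frames only start arriving once _faceSource.TrackingId has been set to a tracked body's TrackingId, which is typically done in the body frame handler.

using Microsoft.Kinect;        // DetectionResult
using Microsoft.Kinect.Face;   // FaceFrame, FaceFrameResult, FaceProperty

private void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame != null && frame.FaceFrameResult != null)
        {
            FaceFrameResult result = frame.FaceFrameResult;

            // each feature requested in the constructor is reported as a DetectionResult value
            DetectionResult happy = result.FaceProperties[FaceProperty.Happy];
            DetectionResult engaged = result.FaceProperties[FaceProperty.Engaged];
            DetectionResult mouthOpen = result.FaceProperties[FaceProperty.MouthOpen];

            // application-specific reaction goes here, e.g. writing a command to this.sp
        }
    }
}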