/// <summary>
/// Initializes a new instance of the MainWindow class: acquires the default
/// Kinect sensor, wires up body-frame delivery, and creates one gesture
/// detector per trackable body.
/// </summary>
public MainWindow()
{
    // NOTE(review): stores this window in a (presumably static) field so other
    // objects — the GestureDetector below receives it — can reach back into the
    // UI. Singleton-style coupling; TODO confirm it is intentional.
    main = this;

    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text (IsAvailable may still be false right after Open)
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                    : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow (loads XAML; must precede the DataContext wiring)
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // create a gesture detector for each body (6 bodies => 6 detectors)
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        // this variant passes the window itself (`main`) into each detector
        GestureDetector detector = new GestureDetector(this.kinectSensor, result, main);
        this.gestureDetectorList.Add(detector);
    }
}
/// <summary>
/// Initializes a new instance of the MainWindow class: sets up the Kinect
/// sensor, a color-frame reader backed by a WriteableBitmap for display,
/// a body-frame reader, and a single gesture detector/result pair.
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the reader for color frames and hook up its arrival callback
    this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();
    this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;

    // ask the sensor for the frame geometry in BGRA, then allocate the bitmap
    // the color frames will be copied into (Bgr32: alpha byte present but ignored)
    FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
    this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                    : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // single result view + detector (this variant tracks one gesture slot only)
    this.gestureResultView = new GestureResultView(false, false, 0.0f, "");

    // initialize the gesture detection objects for our gestures
    this.gestureDetector = new GestureDetector(this.kinectSensor, this.gestureResultView);

    // initialize the MainWindow (loads XAML)
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;
    this.gestureResultGrid.DataContext = this.gestureResultView;
}
/// <summary>
/// Initializes a new instance of the MainWindow class.
/// Acquires the default Kinect sensor, starts body-frame streaming, prepares
/// the body viewer, and delegates detector construction to createDetectors().
/// </summary>
public MainWindow()
{
    // Only one sensor is currently supported.
    this.kinectSensor = KinectSensor.GetDefault();

    // Track sensor availability changes for the status display.
    this.kinectSensor.IsAvailableChanged += Sensor_IsAvailableChanged;

    // Start streaming.
    this.kinectSensor.Open();

    // Reflect current availability in the status bar.
    this.StatusText = this.kinectSensor.IsAvailable
        ? Properties.Resources.RunningStatusText
        : Properties.Resources.NoSensorStatusText;

    // Body frames feed both gesture detection and the skeleton view.
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();
    this.bodyFrameReader.FrameArrived += Reader_BodyFrameArrived;

    // Renders the tracked bodies in the UI.
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // One list of gesture detectors per tracked body.
    detectorPerBodyList = new List<List<GestureDetector>>();

    // Load the XAML before any control references are used.
    InitializeComponent();

    // Data contexts for UI binding.
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // Create a gesture detector for each body (6 bodies => 6 detectors)
    // and the content controls that display their results.
    createDetectors();
}
/// <summary>
/// Initializes a new instance of the MainWindow class: opens the Kinect
/// sensor and body-frame reader, and creates one gesture detector per body,
/// each forwarding its detections through the GestureDetected event handler.
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // open the sensor
    // NOTE(review): unlike the sibling variants, this one subscribes to no
    // IsAvailableChanged event and sets no status text.
    this.kinectSensor.Open();

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow (loads XAML)
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;

    // create a gesture detector for each body (6 bodies => 6 detectors)
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        // bubble each detector's detections up to this window's handler
        detector.GestureDetected += this.GestureDetected;
        this.gestureDetectorList.Add(detector);
    }
}
/// <summary>
/// Initializes a new instance of the MainWindow class: wires up the Kinect
/// sensor and body frames, then creates gesture detectors and lays their
/// result views out across the first two columns of the content grid.
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                    : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow (loads XAML)
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // next free grid row in column 0 / column 1
    int col0Row = 0;
    int col1Row = 0;

    // NOTE(review): the Kinect supports 6 bodies, but this variant deliberately(?)
    // hard-codes a single body/detector — TODO confirm; if intentional, the
    // even/odd column split below is dead code (the loop runs once with i == 0).
    int maxBodies = 1;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        // this variant requires an explicit post-construction setup step
        detector.setup();
        this.gestureDetectorList.Add(detector);

        // split gesture results across the first two columns of the content grid
        ContentControl contentControl = new ContentControl();
        contentControl.Content = this.gestureDetectorList[i].GestureResultView;
        if (i % 2 == 0)
        {
            // Gesture results for bodies: 0, 2, 4
            Grid.SetColumn(contentControl, 0);
            Grid.SetRow(contentControl, col0Row);
            ++col0Row;
        }
        else
        {
            // Gesture results for bodies: 1, 3, 5
            Grid.SetColumn(contentControl, 1);
            Grid.SetRow(contentControl, col1Row);
            ++col1Row;
        }
        this.contentGrid.Children.Add(contentControl);
    }
}
/// <summary>
/// Initializes a new instance of the ZeroResultView class and sets initial
/// property values. The displayed image starts as the "not tracked" image.
/// </summary>
/// <param name="bodyIndex">Body index associated with the current gesture detector</param>
/// <param name="isTracked">True, if the body is currently tracked</param>
/// <param name="detected">True, if the "zero" state is currently detected for the associated body</param>
/// <param name="confidence">Initial confidence value for the detection result</param>
/// <param name="kinectBodyView">Body view associated with this result.
/// NOTE(review): currently unused in this constructor — kept for interface
/// compatibility; UpdateZeroResult takes it as a parameter instead.</param>
public ZeroResultView(int bodyIndex, bool isTracked, bool detected, double confidence, KinectBodyView kinectBodyView)
{
    this.BodyIndex = bodyIndex;
    this.IsTracked = isTracked;
    this.Detected = detected;
    this.Confidence = confidence;
    // until a body is seen, show the "not tracked" placeholder image
    this.ImageSource = this.notTrackedImage;
}
/// <summary>
/// Updates the values associated with the discrete gesture detection result.
/// When the body is tracked, the "zero" state is shown when the measured flex
/// angle lies within +/- 5 degrees of zero; otherwise the "not zero" state is
/// shown. When tracking is lost, a neutral "not tracked" state is displayed.
/// </summary>
/// <param name="isBodyTrackingIdValid">True, if the body associated with this result object is still being tracked</param>
/// <param name="isGestureDetected">True, if the discrete gesture is currently detected (currently unused; the decision is based on the flex angle instead)</param>
/// <param name="detectionConfidence">Confidence value for detection of the discrete gesture (currently unused; Confidence is always reset to 0)</param>
/// <param name="kinectBodyView">Body view whose kinectFeedback.currentFlexAngle supplies the measured angle</param>
public void UpdateZeroResult(bool isBodyTrackingIdValid, bool isGestureDetected, double detectionConfidence, KinectBodyView kinectBodyView)
{
    this.IsTracked = isBodyTrackingIdValid;
    this.Confidence = 0.0f;

    if (!this.IsTracked)
    {
        // body lost: neutral display state
        this.ImageSource = this.notTrackedImage;
        this.Detected = false;
        this.BodyColor = Brushes.Gray;
    }
    else
    {
        // BUG FIX: the original condition used `||` (angle <= 5 OR angle >= -5),
        // which is true for EVERY angle, making the "not zero" branch
        // unreachable. The intent is clearly |angle| <= 5 degrees, i.e. `&&`.
        var angle = kinectBodyView.kinectFeedback.currentFlexAngle;
        if (angle <= 5.0f && angle >= -5.0f)
        {
            this.ImageSource = this.zeroImage;
            this.BodyColor = this.trackedColors[2];
        }
        else
        {
            this.ImageSource = this.notZeroImage;
            this.BodyColor = this.trackedColors[0];
        }
    }
}
/// <summary>
/// Initializes a new instance of the MainWindow class: wires up the Kinect
/// sensor, body frames, a serial port (COM6), text-to-speech voice selection,
/// per-body gesture detectors, and Kinect-microphone speech recognition.
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                    : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow (loads XAML)
    this.InitializeComponent();

    // NOTE(review): hard-coded port name, and catch(Exception) swallows the
    // actual error (UnauthorizedAccessException vs IOException etc.) — consider
    // catching specific exception types and logging the details.
    try
    {
        sp.PortName = "COM6";
        sp.BaudRate = 9600;
        sp.Open();
    }
    catch (Exception)
    {
        MessageBox.Show("Please give a valid port number or check your connection");
    }

    // pick a female TTS voice for spoken feedback
    speaker.SelectVoiceByHints(System.Speech.Synthesis.VoiceGender.Female);

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // create a gesture detector for each body (6 bodies => 6 detectors)
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        this.gestureDetectorList.Add(detector);

        // NOTE(review): this per-body contentControl is never added to the UI;
        // only contentControl2 (after the loop) is displayed.
        ContentControl contentControl = new ContentControl();
        contentControl.Content = this.gestureDetectorList[i].GestureResultView;

        // NOTE(review): everything below runs INSIDE the per-body loop, so the
        // sensor is re-opened (a no-op after the first call? TODO confirm) and
        // the speech engine is re-created and RecognizeAsync re-invoked up to
        // maxBodies times — this setup almost certainly belongs AFTER the loop.
        if (this.kinectSensor != null)
        {
            // open the sensor
            this.kinectSensor.Open();

            // grab the audio stream
            IReadOnlyList<AudioBeam> audioBeamList = this.kinectSensor.AudioSource.AudioBeams;
            System.IO.Stream audioStream = audioBeamList[0].OpenInputStream();

            // create the convert stream (Kinect 32-bit float -> 16-bit PCM)
            this.convertStream = new KinectAudioStream(audioStream);
        }
        else
        {
            // on failure, bail out of the constructor
            return;
        }

        RecognizerInfo ri = TryGetKinectRecognizer();
        if (null != ri)
        {
            this.speechEngine = new SpeechRecognitionEngine(ri.Id);

            // Create a grammar from grammar definition XML file.
            using (var memoryStream = new MemoryStream(Encoding.ASCII.GetBytes(Properties.Resources.SpeechGrammar)))
            {
                var g = new Grammar(memoryStream);
                this.speechEngine.LoadGrammar(g);
            }

            this.speechEngine.SpeechRecognized += this.SpeechRecognized;
            this.speechEngine.SpeechRecognitionRejected += this.SpeechRejected;

            // let the convertStream know speech is going active
            this.convertStream.SpeechActive = true;

            // For long recognition sessions (a few hours or more), it may be
            // beneficial to turn off adaptation of the acoustic model to
            // prevent accuracy from degrading over time:
            ////speechEngine.UpdateRecognizerSetting("AdaptationOn", 0);

            this.speechEngine.SetInputToAudioStream(
                this.convertStream, new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
            this.speechEngine.RecognizeAsync(RecognizeMode.Multiple);
        }
        else
        {
            //this.statusBarText.Text = Properties.Resources.NoSpeechRecognizer;
        }
    }

    // display the first body's gesture result view
    ContentControl contentControl2 = new ContentControl();
    contentControl2.Content = this.gestureDetectorList[0].GestureResultView;
    this.contentGrid.Children.Add(contentControl2);
}
/// <summary>
/// Initializes a new instance of the MainWindow class.
/// Acquires the Kinect sensor, starts body-frame streaming, builds one
/// gesture detector per trackable body, and lays the per-body result views
/// out across the first two columns of the content grid.
/// </summary>
public MainWindow()
{
    // Only one sensor is currently supported.
    this.kinectSensor = KinectSensor.GetDefault();

    // Track availability changes so the status bar stays accurate.
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // Start streaming.
    this.kinectSensor.Open();

    // Reflect current availability in the status bar.
    this.StatusText = this.kinectSensor.IsAvailable
        ? Properties.Resources.RunningStatusText
        : Properties.Resources.NoSensorStatusText;

    // Body frames drive both gesture detection and the skeleton view.
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // Renders tracked bodies in the UI.
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // One detector per body, filled in below.
    this.gestureDetectorList = new List<GestureDetector>();

    // Load the XAML before touching any named controls.
    this.InitializeComponent();

    // Data contexts for UI binding.
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // Create a gesture detector for each body (6 bodies => 6 detectors) and
    // a content control per detector to display its result in the UI.
    int leftRow = 0;   // next free row in grid column 0
    int rightRow = 0;  // next free row in grid column 1
    int bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int bodyIndex = 0; bodyIndex < bodyCount; ++bodyIndex)
    {
        GestureResultView resultView = new GestureResultView(bodyIndex, false, false, 0.0f);
        GestureDetector bodyDetector = new GestureDetector(this.kinectSensor, resultView);
        this.gestureDetectorList.Add(bodyDetector);

        // Alternate result views between the grid's first two columns:
        // even body indices (0, 2, 4) go left, odd ones (1, 3, 5) go right.
        ContentControl resultControl = new ContentControl();
        resultControl.Content = this.gestureDetectorList[bodyIndex].GestureResultView;
        int column = bodyIndex % 2;
        Grid.SetColumn(resultControl, column);
        if (column == 0)
        {
            Grid.SetRow(resultControl, leftRow);
            leftRow++;
        }
        else
        {
            Grid.SetRow(resultControl, rightRow);
            rightRow++;
        }
        this.contentGrid.Children.Add(resultControl);
    }
}
/// <summary>
/// Initializes a new instance of the MainWindow class for the phrase-sampler
/// (v3.0) recording tool: wires up the Kinect sensor and frame readers,
/// restores the per-phrase sample counters from disk, shuffles the phrase
/// order, creates per-body gesture detectors, and prepares the first phrase
/// (prompt image, output directory, and the color/depth/joint writers).
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // color and depth frames arrive together through one multi-source reader
    this._reader = kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth);
    this._reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                    : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow (loads XAML)
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // writers that persist color, depth and joint data for each captured frame
    this.colorFrameWriter = new ColorFrameWriter();
    this.depthFrameWriter = new DepthFrameWriter();
    this.jointDataWriter = new JointDataWriter();
    this.totalCapturedFrames_joints = 0;
    this.totalCapturedFrames_color = 0;
    this.totalCapturedFrames_depth = 0;
    this.framesCapturedInPhrase = 0;

    // restore per-phrase sample counters saved by a previous session, if any;
    // missing entries (or a missing file) default to 0
    this.phrase_indices = new int[phrase_list.Length];
    if (File.Exists(@".\indices_state.txt"))
    {
        // BUG FIX: the StreamReader was never closed/disposed, leaking the
        // handle and keeping indices_state.txt locked for the process
        // lifetime. Wrapped in `using` so it is deterministically released.
        using (System.IO.StreamReader file = new System.IO.StreamReader(@".\indices_state.txt"))
        {
            String indices_line = file.ReadLine();
            String[] indices_states = indices_line.Split(' ');
            for (int i = 0; i < phrase_indices.Length; i++)
            {
                if (i >= indices_states.Length)
                {
                    this.phrase_indices[i] = 0;
                }
                else
                {
                    this.phrase_indices[i] = Int32.Parse(indices_states[i]);
                }
            }
        }
    }
    else
    {
        for (int i = 0; i < phrase_indices.Length; i++)
        {
            this.phrase_indices[i] = 0;
        }
    }

    // present the phrases in a random order
    random_phrase_indices = new int[phrase_list.Length];
    random_phrase_inx_counter = 0;
    for (int i = 0; i < phrase_list.Length; i++)
    {
        random_phrase_indices[i] = i;
    }
    new Random().Shuffle(random_phrase_indices);

    session_number = 1;

    // create a gesture detector for each body (6 bodies => 6 detectors)
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        this.gestureDetectorList.Add(detector);

        // NOTE(review): this control is created but (with the original grid
        // layout code removed) never added to the UI.
        ContentControl contentControl = new ContentControl();
        contentControl.Content = this.gestureDetectorList[i].GestureResultView;
    }

    prevDeleteButton.Click += deletePreviousSample;
    currentPhraseName.Text = phrase_list[current_phrase_index];

    // turn e.g. "SOME_PHRASE_NAME" into "some phrase name.png" to locate the prompt image
    String current_phrase = phrase_list[current_phrase_index];
    char[] delims = { '_' };
    String[] words = current_phrase.Split(delims);
    StringBuilder builder = new StringBuilder();
    foreach (string s in words)
    {
        builder.Append(s.ToLower()).Append(" ");
    }
    String cleanedPhrase = builder.ToString().TrimEnd(new char[] { ' ' });
    cleanedPhrase += ".png";

    // NOTE(review): absolute, machine-specific path — consider moving to config
    BitmapImage image = new BitmapImage();
    image.BeginInit();
    image.UriSource = new Uri(System.IO.Path.Combine(
        @"C:\Users\aslr\Documents\GitHub\SignLanguageRecognition\phrase-sampler-3.0\phrase_images",
        cleanedPhrase));
    image.EndInit();
    phraseImage.Source = image;

    // create the output directory for this phrase and point the writers at it
    phrase_name = phrase_list[current_phrase_index];
    mainDir = System.IO.Path.Combine(dataWritePath, phrase_name);
    System.IO.Directory.CreateDirectory(mainDir);
    colorFrameWriter.setCurrentPhrase(phrase_name);
    depthFrameWriter.setCurrentPhrase(phrase_name);
    jointDataWriter.setCurrentPhrase(phrase_name);
}
/// <summary>
/// Initializes a new instance of the MainWindow class: acquires the Kinect
/// sensor, starts body-frame streaming, and creates one gesture detector per
/// body, splitting the result views across two grid columns.
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                    : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow (loads XAML)
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // create a gesture detector for each body (6 bodies => 6 detectors)
    // and create content controls to display results in the UI
    int col0Row = 0;
    int col1Row = 0;
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        this.gestureDetectorList.Add(detector);

        // split gesture results across the first two columns of the content grid
        ContentControl contentControl = new ContentControl();
        contentControl.Content = this.gestureDetectorList[i].GestureResultView;
        if (i % 2 == 0)
        {
            // Gesture results for bodies: 0, 2, 4
            Grid.SetColumn(contentControl, 0);
            Grid.SetRow(contentControl, col0Row);
            ++col0Row;
        }
        else
        {
            // Gesture results for bodies: 1, 3, 5
            Grid.SetColumn(contentControl, 1);
            Grid.SetRow(contentControl, col1Row);
            ++col1Row;
        }
        this.contentGrid.Children.Add(contentControl);
    }
}
/// <summary>
/// Initializes a new instance of the MainWindow class: populates the travel
/// to-do list, wires up the Kinect sensor and body frames, creates a single
/// gesture detector bound to the UI, and registers this ASR modality with
/// the multimodal interaction (MMI) framework.
/// </summary>
public MainWindow()
{
    // NOTE(review): window stored in a (presumably static) field so other
    // classes can reach back into the UI — TODO confirm intent.
    main = this;

    InitializeComponent();

    // seed the to-do list shown in lbTodoList
    options.Add(new TodoItem() { Title = "Pesquisar Voo para Paris", Color = "#ff00BCF2" });
    options.Add(new TodoItem() { Title = "Pesquisar Voo para Roma" });
    options.Add(new TodoItem() { Title = "Pesquisar Voo para Londres" });
    options.Add(new TodoItem() { Title = "Pesquisar Hotel para Paris" });
    options.Add(new TodoItem() { Title = "Pesquisar Hotel para Roma" });
    options.Add(new TodoItem() { Title = "Pesquisar Hotel para Londres" });
    lbTodoList.ItemsSource = options;

    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                    : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // NOTE(review): InitializeComponent() was already called at the top of the
    // constructor; the generated _contentLoaded guard presumably makes this
    // second call a no-op — TODO confirm and remove one of the two calls.
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // single detector for body slot 0 (the original per-body loop is disabled);
    // this variant also hands the detector the window, the `circle` UI element,
    // and the dispatcher so it can update the UI from gesture callbacks
    GestureResultView result = new GestureResultView(0, false, false, 0.0f);
    GestureDetector detector = new GestureDetector(this.kinectSensor, result, this.main, circle, this.Dispatcher);
    this.gestureDetectorList.Add(detector);

    // display the result view at grid position (row 2, column 0)
    ContentControl contentControl = new ContentControl();
    contentControl.Content = this.gestureDetectorList[0].GestureResultView;
    Grid.SetColumn(contentControl, 0);
    Grid.SetRow(contentControl, 2);
    this.contentGrid.Children.Add(contentControl);

    // init LifeCycleEvents:
    // LifeCycleEvents(string source, string target, string id, string medium, string mode)
    lce = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command");

    // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)
    //mmic = new MmiCommunication("localhost",9876,"User1", "ASR"); //PORT TO FUSION - uncomment this line to work with fusion later
    mmic = new MmiCommunication("localhost", 8000, "User1", "ASR");
    mmic.Send(lce.NewContextRequest());
}
/// <summary>
/// Initializes a new instance of the MainWindow class: wires up the Kinect
/// sensor, body frames, a single gesture detector bound to the UI, a 100 ms
/// UI update timer, and (optionally) the serial link to external hardware.
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                    : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the MainWindow (loads XAML)
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // single gesture detector/result pair, displayed at grid (row 1, column 0)
    gestureResultView = new GestureResultView(false, false, 0.0f, "null");
    gestureDetector = new GestureDetector(this.kinectSensor, gestureResultView);
    ContentControl contentControl = new ContentControl();
    contentControl.Content = this.gestureDetector.GestureResultView;
    Grid.SetColumn(contentControl, 0);
    Grid.SetRow(contentControl, 1);
    this.contentGrid.Children.Add(contentControl);

    // home the hardware, then poll state every 100 ms
    MoveTo(0, 0);
    DispatcherTimer timer = new DispatcherTimer();
    timer.Interval = TimeSpan.FromSeconds(0.1);
    timer.Tick += timer_Tick;
    timer.Start();

    this.Abcsissa = abcsissa;
    this.Ordinate = ordinate;

    if (serialAttached == true)
    {
        // BUG FIX: BaudRate was previously assigned AFTER Open(), so the port
        // was first opened at the driver's default rate (typically 9600) and
        // only then switched to 57600, risking garbled initial traffic.
        // Configure the port fully before opening it.
        // NOTE(review): Open() throws if COM3 is absent/busy — no handling here.
        this.serialport = new SerialPort();
        serialport.PortName = "COM3";
        serialport.BaudRate = 57600;
        serialport.Open();
    }
} //main window
/// <summary>
/// Initializes a new instance of the MainWindow class for the phrase-sampler
/// (v2.0) recording tool: wires up the Kinect sensor and frame readers,
/// creates per-body gesture detectors, and prepares the first phrase
/// (prompt image, output directory, and the color/depth/joint writers).
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // color and depth frames arrive together through one multi-source reader
    this._reader = kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth);
    this._reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                    : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow (loads XAML)
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // HTK-server connection (disabled in this build):
    //this.clientInterface = ClientInterface.getClientInstance();
    //clientInterface.connect();
    //Console.WriteLine("connect to the client interface \n " + clientInterface.GetHashCode() + "\n");
    //clientInterface.disconnect();

    // writers that persist color, depth and joint data for each captured frame
    this.colorFrameWriter = new ColorFrameWriter();
    this.depthFrameWriter = new DepthFrameWriter();
    this.jointDataWriter = new JointDataWriter();
    this.totalCapturedFrames_joints = 0;
    this.totalCapturedFrames_color = 0;
    this.totalCapturedFrames_depth = 0;

    session_number = 1;

    // create a gesture detector for each body (6 bodies => 6 detectors)
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        this.gestureDetectorList.Add(detector);

        // NOTE(review): this control is created but (with the grid layout code
        // commented out below) never added to the UI.
        ContentControl contentControl = new ContentControl();
        contentControl.Content = this.gestureDetectorList[i].GestureResultView;
        /* if (i % 2 == 0) { // Gesture results for bodies: 0, 2, 4
               Grid.SetColumn(contentControl, 0); Grid.SetRow(contentControl, col0Row); ++col0Row;
           } else { // Gesture results for bodies: 1, 3, 5
               Grid.SetColumn(contentControl, 1); Grid.SetRow(contentControl, col1Row); ++col1Row;
           }
           this.contentGrid.Children.Add(contentControl);*/
    }

    prevDeleteButton.Click += deletePreviousSample;
    currentPhraseName.Text = phrase_list[current_phrase_index];

    // turn e.g. "SOME_PHRASE_NAME" into "some phrase name.png" to locate the prompt image
    String current_phrase = phrase_list[current_phrase_index];
    char[] delims = { '_' };
    String[] words = current_phrase.Split(delims);
    StringBuilder builder = new StringBuilder();
    foreach (string s in words)
    {
        builder.Append(s.ToLower()).Append(" ");
    }
    String cleanedPhrase = builder.ToString().TrimEnd(new char[] { ' ' });
    cleanedPhrase += ".png";

    // NOTE(review): absolute, machine-specific path — consider moving to config
    BitmapImage image = new BitmapImage();
    image.BeginInit();
    image.UriSource = new Uri(System.IO.Path.Combine(
        @"C:\Users\aslr\Documents\GitHub\SignLanguageRecognition\phrase-sampler-2.0\phrase_images",
        cleanedPhrase));
    image.EndInit();
    phraseImage.Source = image;

    phrase_name = phrase_list[current_phrase_index];
    /*clientInterface.sendData("new_phrase");
      clientInterface.sendData(phrase_name);*/

    // create the output directory for this phrase and point the writers at it
    mainDir = System.IO.Path.Combine(dataWritePath, phrase_name);
    System.IO.Directory.CreateDirectory(mainDir);
    colorFrameWriter.setCurrentPhrase(phrase_name);
    depthFrameWriter.setCurrentPhrase(phrase_name);
    jointDataWriter.setCurrentPhrase(phrase_name);
}
/// <summary>
/// Initializes a new instance of the MainWindow class.
/// Wires up the Kinect sensor and body-frame reader, builds one gesture
/// detector per trackable body, connects to the HTK server over TCP, and
/// announces the first phrase to be recorded.
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable
        ? Properties.Resources.RunningStatusText
        : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow; must run before any named XAML element
    // (kinectBodyViewbox, currentPhraseName, prevDeleteButton) is touched
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // connect to htk server via tcpClient (shared singleton instance)
    this.clientInterface = ClientInterface.getClientInstance();
    clientInterface.connect();
    Console.WriteLine("connect to the client interface \n " + clientInterface.GetHashCode() + "\n");

    // create a gesture detector for each body the sensor can track
    // (6 bodies => 6 detectors); the per-body ContentControl UI that an
    // earlier revision created here was dead code (never added to the
    // visual tree) and has been removed
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        this.gestureDetectorList.Add(detector);
    }

    // let the user discard the most recent recorded sample
    prevDeleteButton.Click += deletePreviousSample;

    // show "<1-based index> <phrase>" and tell the HTK server which
    // phrase the upcoming samples belong to
    currentPhraseName.Text = (current_phrase_index + 1) + " " + phrase_list[current_phrase_index];
    phrase_name = phrase_list[current_phrase_index];
    clientInterface.sendData("new_phrase");
    clientInterface.sendData(phrase_name);
}
/// <summary>
/// Initializes a new instance of the MainWindow class.
/// Sets up the Kinect sensor, the body-frame and gesture pipelines with a
/// per-body result display in the content grid, face-frame tracking, and
/// the serial port used for external output.
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable
        ? Properties.Resources.RunningStatusText
        : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow; must run before any named XAML element
    // (kinectBodyViewbox, contentGrid) is touched
    this.InitializeComponent();

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // create a gesture detector for each body (6 bodies => 6 detectors)
    // and create content controls to display results in the UI
    int col0Row = 0;
    int col1Row = 0;
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        this.gestureDetectorList.Add(detector);

        // split gesture results across the first two columns of the content grid
        ContentControl contentControl = new ContentControl();
        contentControl.Content = this.gestureDetectorList[i].GestureResultView;

        if (i % 2 == 0)
        {
            // Gesture results for bodies: 0, 2, 4
            Grid.SetColumn(contentControl, 0);
            Grid.SetRow(contentControl, col0Row);
            ++col0Row;
        }
        else
        {
            // Gesture results for bodies: 1, 3, 5
            Grid.SetColumn(contentControl, 1);
            Grid.SetRow(contentControl, col1Row);
            ++col1Row;
        }

        this.contentGrid.Children.Add(contentControl);
    }

    // Face detection: subscribe to the frame features consumed by
    // FaceReader_FrameArrived (body index 0 only)
    _faceSource = new FaceFrameSource(
        kinectSensor,
        0,
        FaceFrameFeatures.BoundingBoxInColorSpace |
        FaceFrameFeatures.Happy |
        FaceFrameFeatures.LeftEyeClosed |
        FaceFrameFeatures.MouthOpen |
        FaceFrameFeatures.FaceEngagement |
        FaceFrameFeatures.LookingAway |
        FaceFrameFeatures.PointsInColorSpace |
        FaceFrameFeatures.RightEyeClosed);
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += FaceReader_FrameArrived;

    // serial port for external hardware; port name and baud rate come
    // from the application settings (8 data bits, no parity, 1 stop bit)
    this.sp = new SerialPort(
        Properties.Settings.Default.ComPort,
        Properties.Settings.Default.Baudrate,
        Parity.None,
        8,
        StopBits.One);
}