///<summary>
/// Releases the media elements when closing the replay window
/// and switches back to recording mode
/// Written by Baptiste Germond
///</summary>
private void quit()
{
    isReplaying = false;
    statisticsPath = "";
    if (skeletonScrolling != null)
    {
        skeletonScrolling.Stop();
        skeletonScrolling = null;
    }
    (TrainingSideTool.Get().FindResource("StopReplayButtonAction") as Storyboard).Begin();
    DrawingSheetView.Get().ReplayVideo.Close();
    DrawingSheetView.Get().ReplayVideo.Source = null;
    DrawingSheetView.Get().ReplayAudio.Close();
    DrawingSheetView.Get().ReplayAudio.Source = null;
    ReplayView.Get().SoundCheckbox.IsChecked = false;
    SideToolsViewModel.Get().enableTrackingAndTrainingTab();
    TrainingSideToolViewModel.Get().recordingMode();
    DrawingSheetAvatarViewModel.Get().normalMode();

    // Reactivate the sensors
    KinectDevice.sensor.SkeletonStream.Enable();
    if (faceTrack)
    {
        KinectDevice.faceTracking = true;
    }
    TrackingSideToolViewModel.get().SpeedRate = speedRateActive;

    // Reactivate the audience
    if (TrainingSideToolViewModel.audienceOn)
    {
        TrainingSideToolViewModel.audienceOn = false;
        GeneralSideTool.Get().AudienceControlCheckBox.IsChecked = true;
    }
}
/// <summary>
/// Stops the video training mode and resets the video and skeleton lists
/// </summary>
public void StopVideoTraining()
{
    SideToolsViewModel.Get().enableTrackingAndTrainingTab();
    (TrainingSideTool.Get().FindResource("StopVideoTraining") as Storyboard).Begin();
    VideosNameList = new List<VideosList>();
    DrawingSheetAvatarViewModel.Get().isTraining = false;
    SkeletonList = null;
}
/// <summary>
/// Launches the recording once the waiting time is over
/// </summary>
public void launchRecordingAfterWait()
{
    inRecord = true;
    allFalse();
    if (FaceRecognition == true)
    {
        TrackingSideToolViewModel tracking = TrackingSideToolViewModel.get();
        tracking.FaceTracking = true;
        tracking.emo = true;
        //tracking.Mouth = true;
        //tracking.Mouth2 = true;
        // These features are not working for now
        //tracking.pupilR = true;
        tracking.LookR = true;
    }
    if (VoiceRecognition == true)
    {
        TrackingSideToolViewModel tracking = TrackingSideToolViewModel.get();
        //tracking.PeakDetection = true; // Creates lag while recording
        tracking.SpeedRate = true;
        tracking.ShowTextOnScreen = true;
        /*tracking.VoiceMonotony = true;
         *tracking.BadVoiceReflex = true;*/ // These features are not working
    }
    if (BodyRecognition == true)
    {
        TrackingSideToolViewModel.get().UseFeedback = true;
    }
    SideToolsViewModel.Get().chooseTraining();
    TrainingSideToolViewModel tstvm = TrainingSideToolViewModel.Get();
    tstvm.limitedTimeHours = "0";
    tstvm.limitedTimeMinutes = MinRecord.ToString();
    tstvm.limitedTimeSeconds = SecRecord.ToString();
    tstvm.isTimeLimited = true;

    /* The session recording captures whichever view (avatar or stream)
     * the user was in when the recording started */
    if (MainWindow.drawingSheet.getMode() == SheetMode.AvatarMode)
    {
        tstvm.ToggleAvatarOpenGLRecording = true;
    }
    else if (MainWindow.drawingSheet.getMode() == SheetMode.StreamMode)
    {
        tstvm.ToggleStreamRecording = true;
    }
    tstvm.ToggleAudioRecording = true;

    // Launch the recording by programmatically invoking the Start Recording button
    ButtonAutomationPeer peer = new ButtonAutomationPeer(TrainingSideTool.Get().StartRecordingButton);
    IInvokeProvider invokeProv = peer.GetPattern(PatternInterface.Invoke) as IInvokeProvider;
    invokeProv.Invoke();
}
/// <summary>
/// Event raised at each frame sent by the Kinect
/// </summary>
/// <param name="sender"></param>
/// <param name="evt">ColorImageFrame we will display</param>
public override void draw(object sender, EventArgs evt)
{
    if (this.dsv.getMode().CompareTo(SheetMode.StreamMode) == 0
        || (TrainingSideToolViewModel.Get().isRecording && TrainingSideTool.Get().StreamRecordingCheckbox.IsChecked.Value)
        || ReplayViewModel.isReplaying)
    {
        Skeleton avatar = null;
        if (skt != null)
        {
            avatar = skt.skeleton;
        }
        ColorImageFrameReadyEventArgs e = (ColorImageFrameReadyEventArgs)evt;
        using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
        {
            if (colorFrame != null)
            {
                // Copy the pixel data from the image to a temporary array
                colorFrame.CopyPixelDataTo(this.colorPixels);

                // Write the pixel data into our bitmap
                this.colorBitmap.WritePixels(
                    new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                    this.colorPixels,
                    this.colorBitmap.PixelWidth * sizeof(int),
                    0);
                using (DrawingContext dc = drawingGroup.Open())
                {
                    dc.DrawImage(this.colorBitmap,
                        new Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                }

                // A non-null handler means the session is being recorded,
                // so raise an event carrying a bitmap of the current frame
                if (backgroundDrawEventStream != null)
                {
                    backgroundDrawEventStream(null, ImageToBitmap(colorFrame));
                }
                ShowFeedbacksOnVideoStream(avatar);
            }
        }
    }
}
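// Note: ImageToBitmap() is called above but not defined in this section.
// The sketch below is a hypothetical helper (name and body assumed, not the
// project's actual implementation) showing one way the Kinect ColorImageFrame's
// Bgr32 pixel data could be converted into a System.Drawing.Bitmap for the
// recording event. It assumes references to System.Drawing and the Kinect SDK.
private System.Drawing.Bitmap ImageToBitmapSketch(ColorImageFrame frame)
{
    // Pull the raw Bgr32 pixels out of the frame
    byte[] pixels = new byte[frame.PixelDataLength];
    frame.CopyPixelDataTo(pixels);

    // Copy the pixels into a 32bpp bitmap of the same size
    var bitmap = new System.Drawing.Bitmap(frame.Width, frame.Height,
        System.Drawing.Imaging.PixelFormat.Format32bppRgb);
    var data = bitmap.LockBits(
        new System.Drawing.Rectangle(0, 0, frame.Width, frame.Height),
        System.Drawing.Imaging.ImageLockMode.WriteOnly,
        bitmap.PixelFormat);
    System.Runtime.InteropServices.Marshal.Copy(pixels, 0, data.Scan0, pixels.Length);
    bitmap.UnlockBits(data);
    return bitmap;
}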