public MainWindow()
{
    currentGroupId = currentGroupName;
    InitializeComponent();
    StartTimer();

    this.backgroundMusic = SoundProvider.Ukulele;
    this.backgroundMusic.Volume = 0.05;
    // Loop the background music by restarting it whenever it finishes.
    this.backgroundMusic.MediaEnded += new EventHandler((object sender, EventArgs e) =>
    {
        this.backgroundMusic.Position = TimeSpan.Zero;
        this.backgroundMusic.Play();
    });
    this.backgroundMusic.Play();

    t.Elapsed += T_Elapsed;

    // Create grabber.
    _grabber = new FrameGrabber<LiveCameraResult>();

    updateMode(AppMode.Participants);

    // Set up a listener for when the client receives a new frame.
    _grabber.NewFrameProvided += (s, e) =>
    {
        if (_mode == AppMode.EmotionsWithClientFaceDetect)
        {
            // Local face detection.
            var rects = _localFaceDetector.DetectMultiScale(e.Frame.Image);
            // Attach faces to frame.
            e.Frame.UserData = rects;
        }

        // The callback may occur on a different thread, so we must use the
        // MainWindow.Dispatcher when manipulating the UI.
        this.Dispatcher.BeginInvoke((Action)(() =>
        {
            // Display the image in the left pane.
            LeftImage.Source = e.Frame.Image.ToBitmapSource();

            // If we're fusing client-side face detection with remote analysis, show the
            // new frame now with the most recent analysis available.
            if (_fuseClientRemoteResults)
            {
                RightImage.Source = VisualizeResult(e.Frame);
            }
        }));

        // Advance the game state machine once the current phase's timer has elapsed.
        if (DateTime.Now - currentTimeTaskStart > currentTimerTask)
        {
            if (gameState == GameState.Explain)
            {
                roundStart = DateTime.Now;
                nextRound();
            }
            else if (gameState == GameState.RoundBegin)
            {
                currentTimerTask = TimeSpan.FromSeconds(15);
                currentTimeTaskStart = DateTime.Now;
                gameState = GameState.Game;
                roundStart = DateTime.Now;
            }
            else if (gameState == GameState.Game)
            {
                currentTimerTask = TimeSpan.FromSeconds(6);
                currentTimeTaskStart = DateTime.Now;
                gameState = GameState.RoundEnd;
                scoringSystem.AddRoundToGameScore();
            }
            else if (gameState == GameState.RoundEnd)
            {
                if (roundNumber == NumOfRounds)
                {
                    // Last round finished: play the winner sound and show the end screen.
                    this.sound = SoundProvider.TheWinner;
                    this.sound.Play();
                    currentTimerTask = TimeSpan.FromSeconds(3);
                    gameState = GameState.GameEnd;
                    this.Dispatcher.BeginInvoke((Action)(() =>
                    {
                        StartEndImages();
                        button.Visibility = Visibility.Visible;
                    }));
                }
                else
                {
                    nextRound();
                    roundStart = DateTime.Now;
                }
            }
        }
    };

    // Set up a listener for when the client receives a new result from an API call.
    _grabber.NewResultAvailable += (s, e) =>
    {
        this.Dispatcher.BeginInvoke((Action)(() =>
        {
            if (e.TimedOut)
            {
                MessageArea.Text = "API call timed out.";
            }
            else if (e.Exception != null)
            {
                string apiName = "";
                string message = e.Exception.Message;
                var faceEx = e.Exception as FaceAPIException;
                var emotionEx = e.Exception as Microsoft.ProjectOxford.Common.ClientException;
                var visionEx = e.Exception as Microsoft.ProjectOxford.Vision.ClientException;
                if (faceEx != null)
                {
                    apiName = "Face";
                    message = faceEx.ErrorMessage;
                }
                else if (emotionEx != null)
                {
                    apiName = "Emotion";
                    message = emotionEx.Error.Message;
                }
                else if (visionEx != null)
                {
                    apiName = "Computer Vision";
                    message = visionEx.Error.Message;
                }
                MessageArea.Text = string.Format("{0} API call failed on frame {1}. Exception: {2}", apiName, e.Frame.Metadata.Index, message);
            }
            else
            {
                _latestResultsToDisplay = e.Analysis;

                // Display the image and visualization in the right pane.
                if (!_fuseClientRemoteResults)
                {
                    RightImage.Source = VisualizeResult(e.Frame);
                }
                if (gameState == GameState.Game || gameState == GameState.RoundBegin)
                {
                    // Only draw the countdown indicator while a round is actively in play.
                    bool drawIndicator = false;
                    if (gameState == GameState.Game)
                    {
                        drawIndicator = true;
                    }
                    RightImage.Source = VisualizeTimer(drawIndicator);
                }
            }
        }));
    };

    // Create local face detector.
    _localFaceDetector.Load("Data/haarcascade_frontalface_alt2.xml");
}