/// <summary>
/// Captures frames from a video file (when <paramref name="filename"/> points to an
/// existing file) or from the default webcam, runs Haar-cascade face detection,
/// filtering and change tracking on each frame, and starts the cloud face-recognition
/// task whenever the tracked scene changes. Blocks until the capture loop ends.
/// </summary>
/// <param name="filename">Optional path to a video file; falls back to the webcam when
/// null, whitespace, or non-existent.</param>
private static void Run(string filename)
{
    int timePerFrame;
    VideoCapture capture;

    // Remember the source kind: a file source that returns an empty frame has ended,
    // whereas a webcam may just have hiccuped for one frame.
    bool isFileSource = !string.IsNullOrWhiteSpace(filename) && File.Exists(filename);

    if (isFileSource)
    {
        // If filename exists, use that as a source of video.
        capture = InitializeVideoCapture(filename);

        // Allow just enough time to paint the frame on the window.
        timePerFrame = 1;
    }
    else
    {
        // Otherwise use the webcam.
        capture = InitializeCapture();

        // FIX: guard BEFORE dereferencing capture.Fps — the original read Fps first
        // and only null-checked afterwards, throwing NullReferenceException whenever
        // webcam initialization failed.
        if (capture == null)
        {
            Console.ReadKey();
            return;
        }

        // Time required to wait until next frame.
        timePerFrame = (int)Math.Round(1000 / capture.Fps);
    }

    // Input was not initialized (covers the video-file branch).
    if (capture == null)
    {
        Console.ReadKey();
        return;
    }

    // FIX: VideoCapture is IDisposable but was never released; dispose it with the
    // rest of the native resources when the loop exits.
    using (capture)
    {
        // Initialize face detection algorithm.
        CascadeClassifier haarCascade = InitializeFaceClassifier();

        // List of simple face filtering algorithms.
        var filtering = new SimpleFaceFiltering(new IFaceFilter[]
        {
            new TooSmallFacesFilter(20, 20)
        });

        // List of simple face tracking algorithms.
        var trackingChanges = new SimpleFaceTracking(new IFaceTrackingChanged[]
        {
            new TrackNumberOfFaces(),
            new TrackDistanceOfFaces { Threshold = 2000 }
        });

        // Open a new window via OpenCV.
        using (Window window = new Window("capture"))
        using (Mat image = new Mat())
        {
            while (true)
            {
                // Get current frame.
                capture.Read(image);
                if (image.Empty())
                {
                    // FIX: a file source returning an empty frame has reached EOF;
                    // the original 'continue' spun forever at 100% CPU. A webcam
                    // empty frame is treated as transient, as before.
                    if (isFileSource)
                    {
                        break;
                    }
                    continue;
                }

                // Detect faces.
                var faces = DetectFaces(haarCascade, image);

                // Filter faces.
                var state = faces.ToImageState();
                state = filtering.FilterFaces(state);

                // Determine change.
                var hasChange = trackingChanges.ShouldUpdateRecognition(state);
                if (hasChange)
                {
                    Console.WriteLine("Changes detected...");

                    // Identify faces if changed and previous identification finished.
                    // NOTE(review): _faceRecognitionTask is never reset to null in this
                    // block — presumably the recognition task does that on completion;
                    // confirm, otherwise recognition runs at most once.
                    if (_faceRecognitionTask == null && !string.IsNullOrWhiteSpace(FaceSubscriptionKey))
                    {
                        _faceRecognitionTask = StartRecognizing(image);
                    }
                }

                using (var renderedFaces = RenderFaces(state, image))
                {
                    // Update popup window.
                    window.ShowImage(renderedFaces);
                }

                // Wait for next frame and allow Window to be repainted.
                Cv2.WaitKey(timePerFrame);
            }
        }
    }
}
/// <summary>
/// Enrolls a hard-coded person into an Azure Face API person group, trains the group,
/// then runs the webcam/video capture loop: detects, filters and tracks faces on each
/// frame, mirrors status into the <see cref="InfoPanel"/> WinForms window, and starts
/// cloud recognition when the tracked scene changes. Blocks until the loop ends.
/// </summary>
/// <param name="filename">Optional path to a video file; falls back to the webcam when
/// null, whitespace, or non-existent.</param>
private static void Run(string filename)
{
    string personGroupId = "nbgaccn";

    // FIX: the group-create call was fire-and-forget, so creating the person below
    // could race (and fail) before the group existed. Block until it completes —
    // this method is synchronous throughout, so sync-over-async is the local norm.
    _faceClient.PersonGroup.CreateAsync(personGroupId, "NBG Accenture")
        .GetAwaiter().GetResult();

    Person zachariasPerson = _faceClient.PersonGroupPerson
        .CreateAsync(personGroupId, "Zacharias Siatris")
        .GetAwaiter().GetResult();

    // Local metadata shown on the info panel when this person is recognized.
    // NOTE(review): hard-coded personal data and absolute file paths belong in
    // configuration, not source — flagged, left functionally identical.
    var zachariasSiatris = new ZSPerson
    {
        name = "Zacharias Siatris",
        company = " Accenture Inc.",
        description = " ROTATING TO THE NEW!!",
        id = " 1123456979",
        isDangerous = true,
        projectTeam = " AI - Innovation Team",
        nbgSection = " In the \"NEW\" Management",
        photoPathId = @"C:\Users\zacharias.siatris\Desktop\FaceAppPhotos\ZachariasSiatris\ZackID.jpg",
    };
    List<ZSPerson> ZSPersonsList = new List<ZSPerson> { zachariasSiatris };

    // Register every training photo for this person.
    const string zachariasSiatrisImageDir =
        @"C:\Users\zacharias.siatris\Desktop\FaceAppPhotos\ZachariasSiatris";
    foreach (string imagePath in Directory.GetFiles(zachariasSiatrisImageDir, "*.jpg"))
    {
        using (Stream s = File.OpenRead(imagePath))
        {
            // FIX: the original did not await this call, so 'using' could dispose
            // the stream while the upload was still reading it.
            _faceClient.PersonGroupPerson
                .AddFaceFromStreamAsync(personGroupId, zachariasPerson.PersonId, s)
                .GetAwaiter().GetResult();
        }
    }

    // FIX: kick off training and actually wait for the request to be accepted
    // before polling its status (was fire-and-forget).
    _faceClient.PersonGroup.TrainAsync(personGroupId).GetAwaiter().GetResult();

    // Poll once per second until training leaves the Running state.
    TrainingStatus trainingStatus;
    while (true)
    {
        trainingStatus = _faceClient.PersonGroup
            .GetTrainingStatusAsync(personGroupId)
            .GetAwaiter().GetResult();
        if (trainingStatus.Status != TrainingStatusType.Running)
        {
            break;
        }

        // FIX: Task.Delay's task was discarded, so the loop spun without pausing
        // and hammered the service; block on the delay instead.
        Task.Delay(1000).GetAwaiter().GetResult();
    }

    int timePerFrame;
    VideoCapture capture;

    if (!string.IsNullOrWhiteSpace(filename) && File.Exists(filename))
    {
        // If filename exists, use that as a source of video.
        capture = InitializeVideoCapture(filename);

        // Allow just enough time to paint the frame on the window.
        timePerFrame = 1;
    }
    else
    {
        // Otherwise use the webcam (device index 0).
        capture = InitializeCapture(0);

        // FIX: guard BEFORE dereferencing capture.Fps — the original null-checked
        // only after reading Fps, throwing NullReferenceException when the webcam
        // failed to initialize.
        if (capture == null)
        {
            Console.ReadKey();
            return;
        }

        // Time required to wait until next frame.
        timePerFrame = (int)Math.Round(1000 / capture.Fps);
    }

    // Input was not initialized (covers the video-file branch).
    if (capture == null)
    {
        Console.ReadKey();
        return;
    }

    // Initialize face detection algorithm.
    CascadeClassifier haarCascade = InitializeFaceClassifier();

    // List of simple face filtering algorithms.
    var filtering = new SimpleFaceFiltering(new IFaceFilter[]
    {
        new TooSmallFacesFilter(20, 20)
    });

    // List of simple face tracking algorithms.
    var trackingChanges = new SimpleFaceTracking(new IFaceTrackingChanged[]
    {
        new TrackNumberOfFaces(),
        new TrackDistanceOfFaces { Threshold = 2000 }
    });

    InfoPanel panel = new InfoPanel();
    panel.Show();

    // FIX: hoisted out of the loop — the original constructed a new Random on every
    // change event, which can repeat sequences when events arrive in quick succession.
    Random random = new Random();

    // Open a new window via OpenCV.
    using (Window window = new Window("capture"))
    using (Mat image = new Mat())
    {
        while (true)
        {
            // Get current frame.
            capture.Read(image);
            if (image.Empty())
            {
                continue;
            }

            // Detect faces.
            var faces = DetectFaces(haarCascade, image);

            // Filter faces.
            var state = faces.ToImageState();
            state = filtering.FilterFaces(state);

            // Determine change.
            var hasChange = trackingChanges.ShouldUpdateRecognition(state);
            if (hasChange)
            {
                Console.WriteLine("Changes detected...");

                // NOTE(review): these controls are touched from the capture loop's
                // thread; if this loop is not on the UI thread, WinForms requires
                // Invoke/BeginInvoke — confirm which thread runs this method.
                // NOTE(review): SelectionFont can be null when the selection spans
                // multiple fonts — confirm this cannot happen here.
                Font currentFont = panel.richTextBox1.SelectionFont;
                FontStyle newFontStyle = currentFont.Style | FontStyle.Bold;
                panel.richTextBox1.SelectionFont =
                    new Font(currentFont.FontFamily, 12, newFontStyle);
                panel.richTextBox1.AppendText("\n- Changes detected...");

                // Decorative binary "noise" in the side panel.
                for (int i = 0; i < 20; i++)
                {
                    panel.richTextBox5.AppendText(random.Next(0, 2).ToString());
                }

                // Identify faces if changed and previous identification finished.
                if (_faceRecognitionTask == null && !string.IsNullOrWhiteSpace(FaceSubscriptionKey))
                {
                    _faceRecognitionTask = StartRecognizing(image, panel, ZSPersonsList);
                }
            }

            using (var renderedFaces = RenderFaces(state, image))
            {
                // Update popup window.
                window.ShowImage(renderedFaces);
            }

            // Wait for next frame and allow Window to be repainted.
            Cv2.WaitKey(timePerFrame);
        }
    }
}