private void btnPredict_Click(object sender, EventArgs e)
{
    Camera.Retrieve(Frame);
    var ImgFrame = Frame.ToImage<Gray, byte>();
    if (ImgFrame != null)
    {
        var faces = FaceDetection.DetectMultiScale(ImgFrame, 1.3, 5);
        if (faces.Count() != 0)
        {
            var processedImg = ImgFrame.Copy(faces[0]).Resize(ProcessedImageWidth, ProcessedImageHeight, Emgu.CV.CvEnum.Inter.Cubic);
            var result = FacialRecognition.Predict(processedImg);
            if (result.Label == 15)
            {
                MessageBox.Show("FESTUS");
            }
            else
            {
                MessageBox.Show("Test Person");
            }
        }
    }
}
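These prediction handlers assume a Haar cascade detector and an already trained recognizer held in form-level fields. A minimal setup sketch, assuming Emgu CV's VideoCapture, a stock frontal-face cascade file, and an EigenFaceRecognizer previously persisted to YMLPath (none of this wiring appears in the snippets themselves):

// Sketch only: field and file names are assumptions, not taken from the examples above.
private VideoCapture Camera;
private Mat Frame = new Mat();
private CascadeClassifier FaceDetection;
private EigenFaceRecognizer FacialRecognition;

private void InitialiseRecognition()
{
    // Stock frontal-face Haar cascade shipped with OpenCV/Emgu CV.
    FaceDetection = new CascadeClassifier("haarcascade_frontalface_default.xml");

    // Recreate the recognizer and load the model written out by the training handlers further down.
    FacialRecognition = new EigenFaceRecognizer();
    FacialRecognition.Read(YMLPath);

    // Grab frames continuously; the handlers call Camera.Retrieve(Frame) on each event/click.
    Camera = new VideoCapture(0);
    Camera.ImageGrabbed += Camera_ImageGrabbed;
    Camera.Start();
}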
private void recognizeButton_Click(object sender, EventArgs e)
{
    Webcam.Retrieve(Frame);
    var imageFrame = Frame.ToImage<Gray, byte>();
    if (imageFrame != null)
    {
        var faces = FaceDetection.DetectMultiScale(imageFrame, 1.3, 5);
        Console.WriteLine($"Faces detected: {faces.Count()}");
        if (faces.Count() != 0)
        {
            var processedImage = imageFrame.Copy(faces[0]).Resize(ProcessedImageWidth, ProcessedImageHeight, Emgu.CV.CvEnum.Inter.Cubic);
            try
            {
                var result = FaceRecognition.Predict(processedImage);
                Console.WriteLine(CheckRecognizeResults(result, _threshold));
            }
            catch (Exception ex)
            {
                //Console.WriteLine("No faces trained, can't recognize");
            }
        }
        else
        {
            //Console.WriteLine("No faces found");
        }
    }
}
private void BtnPredict_Click(object sender, EventArgs e)
{
    Frame = Capture.QueryFrame();
    var imageFrame = Frame.ToImage<Gray, byte>();
    if (imageFrame != null)
    {
        var faces = FaceDetection.DetectMultiScale(imageFrame, 1.3, 5);
        var userDetected = new List<string>();
        foreach (var face in faces)
        {
            var processImage = imageFrame.Copy(face).Resize(ProcessImageWidth, ProcessImageHeight, Inter.Cubic);
            var resultEigen = EigenFaceRecognizer.Predict(processImage);
            if (resultEigen.Label != -1)
            {
                userDetected.Add(DataStoreAccess.GetUsername(resultEigen.Label));
            }
            else
            {
                userDetected.Add("Unknown");
            }
        }
        lbResult.Text = string.Join(",", userDetected);
    }
}
private void FinalFrame_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    var videoFrame = eventArgs.Frame.Clone() as Bitmap;
    var Analyzeframe = new Image<Bgr, byte>(videoFrame);
    if (Analyzeframe != null)
    {
        var grayFrame = Analyzeframe.Convert<Gray, byte>();
        var faces = FaceDetection.DetectMultiScale(grayFrame, 1.3, 5);
        var eyes = EyeDetection.DetectMultiScale(grayFrame, 1.1, 5);
        if (FaceSquare)
        {
            foreach (var face in faces)
            {
                Analyzeframe.Draw(face, new Bgr(Color.Red), 3);
            }
        }
        if (EyeSquare)
        {
            foreach (var eye in eyes)
            {
                Analyzeframe.Draw(eye, new Bgr(Color.Blue), 3);
            }
        }
        pbWebCam.Image = Analyzeframe.ToBitmap();
    }
}
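This handler uses AForge.NET's VideoCaptureDevice rather than Emgu's VideoCapture. A minimal wiring sketch, assuming the first capture device is used (the device selection and the field name are assumptions):

// Sketch only: assumed AForge.NET setup for the NewFrame handler above.
private VideoCaptureDevice FinalFrame;

private void StartCamera()
{
    // Enumerate capture devices and open the first one found.
    var devices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
    FinalFrame = new VideoCaptureDevice(devices[0].MonikerString);

    // Each grabbed frame arrives as a Bitmap in NewFrameEventArgs.
    FinalFrame.NewFrame += FinalFrame_NewFrame;
    FinalFrame.Start();
}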
private void Camera_ImageGrabbed(object sender, EventArgs e)
{
    Camera.Retrieve(Frame);
    var ImgFrame = Frame.ToImage<Bgr, byte>();
    if (ImgFrame != null)
    {
        var grayFrame = ImgFrame.Convert<Gray, byte>();
        var faces = FaceDetection.DetectMultiScale(grayFrame, 1.3, 5);
        var eyes = EyeDetection.DetectMultiScale(grayFrame, 1.3, 5);
        if (FaceSquare)
        {
            foreach (var face in faces)
            {
                ImgFrame.Draw(face, new Bgr(Color.BurlyWood), 3);
            }
        }
        if (EyeSquare)
        {
            foreach (var eye in eyes)
            {
                ImgFrame.Draw(eye, new Bgr(Color.Yellow), 3);
            }
        }
        CameraBox.Image = ImgFrame.ToBitmap();
    }
}
private void Timer_Tick1(object sender, EventArgs e)
{
    Webcam.Retrieve(Frame);
    var imageFrame = Frame.ToImage<Gray, byte>();
    if (imageFrame != null)
    {
        var faces = FaceDetection.DetectMultiScale(imageFrame, 1.3, 5);
        if (faces.Count() != 0)
        {
            Image<Gray, byte> processedImage = imageFrame.Copy(faces[0]).Resize(ProcessedImageWidth, ProcessedImageHeight, Inter.Cubic);
            result = FaceRecognition.Predict(processedImage);

            // Treat the face as known only if the predicted label matches one of the trained IDs;
            // breaking on the first match keeps later non-matching IDs from overwriting the result.
            isUnknown = true;
            foreach (var id in listOfIds)
            {
                if (result.Label == id)
                {
                    isUnknown = false;
                    isPredicted = true;
                    break;
                }
            }
        }
    }
}
public void Webcam_ImageGrabbed(object sender, EventArgs e)
{
    Webcam.Retrieve(Frame);
    var ImageFrame = Frame.ToImage<Bgr, byte>();
    if (ImageFrame != null)
    {
        var grayFrame = ImageFrame.Convert<Gray, byte>();
        var faces = FaceDetection.DetectMultiScale(grayFrame, 1.3, 5); // An array of rectangles holding every face found.
        // var eyes = EyeDetection.DetectMultiScale(grayFrame, 1.3, 5); // Same for eyes.
        foreach (var face in faces)
        {
            ImageFrame.Draw(face, new Bgr(Color.LimeGreen), 3);
            if (result.Label != -1 || (isPredicted && doneTraining))
            {
                try
                {
                    Graphics graphicImage1 = Graphics.FromImage(ImageFrame.Bitmap);
                    graphicImage1.DrawString(listOfNames[result.Label - 1], new Font("Arial", 15, FontStyle.Bold), new SolidBrush(Color.LimeGreen), new Point(face.X, face.Y));
                }
                catch (Exception exception)
                {
                    // No action: this only means there is no data to draw yet, and it occurs sporadically.
                }
            }
            if (result.Label == 0 || result.Label == -1)
            {
                Graphics graphicImage2 = Graphics.FromImage(ImageFrame.Bitmap);
                graphicImage2.DrawString("Unknown", new Font("Arial", 15, FontStyle.Bold), new SolidBrush(Color.Red), new Point(face.X, face.Y));
            }
        }

        // ID checker
        Graphics graphicImage = Graphics.FromImage(ImageFrame.Bitmap);
        graphicImage.DrawString($"Face ID: {result.Label}", new Font("Arial", 15, FontStyle.Bold), new SolidBrush(Color.LimeGreen), new Point(0, 50));
        WebcamBox.Image = ImageFrame.ToBitmap(); // Show the frame in the UI.
    }
}
void updateImage(object sender, EventArgs arg)
{
    var frame = cap.QueryFrame();
    if (frame != null)
    {
        var img = frame.ToImage<Bgr, byte>();
        //img = img.Flip(Emgu.CV.CvEnum.FlipType.Horizontal);
        Image<Gray, byte> grayframe = img.Convert<Gray, byte>();
        var faces = FaceDetection.DetectMultiScale(grayframe, 1.3, 5);
        var juuls = JuulDetection.DetectMultiScale(grayframe, 1.5, 5);
        foreach (var face in faces)
        {
            PointF centerFace = new PointF(face.Location.X + face.Width / 2, face.Location.Y + face.Height / 2);
            calculatePoint(centerFace, Fire);
            if (juuls.Count() < 1)
            {
                Fire = false;
            }
            foreach (var juul in juuls)
            {
                if (juul.IntersectsWith(face) && !Fire)
                {
                    juulTimer.Start();
                    if (juulTimer.Elapsed.Seconds > 1)
                    {
                        Fire = true;
                    }
                    //PointF centerFace = new PointF(face.Location.X + face.Width / 2, face.Location.Y + face.Height / 2);
                    //calculatePoint(centerFace, false);
                }
                //else if (juul.IntersectsWith(face) && Fire)
                //{
                //    Console.WriteLine("AIMING AND FIRING!!!!!");
                //    //PointF Face = new PointF(face.Location.X + face.Width / 2, face.Location.Y + face.Height / 2);
                //    calculatePoint(centerFace, true);
                //}
                if (!juul.IntersectsWith(face))
                {
                    Fire = false;
                    juulTimer.Stop();
                    juulTimer.Reset();
                }
            }
            img.Draw(face, new Bgr(0, double.MaxValue, 0), 3);
        }
        foreach (var juul in juuls)
        {
            img.Draw(juul, new Bgr(100, double.MaxValue, 10), 3);
        }
        videoFrame.Image = img;
    }
}
private void Timer_Tick(object sender, EventArgs e)
{
    Webcam.Retrieve(Frame);
    var imageFrame = Frame.ToImage<Gray, byte>();
    if (TimerCounter < TimeLimit)
    {
        TimerCounter++;
        if (imageFrame != null)
        {
            Rectangle[] faces = FaceDetection.DetectMultiScale(imageFrame, 1.3, 5);
            if (faces.Any())
            {
                // Crop to the detected face rectangle and resize it to the training dimensions.
                var processedImage = imageFrame.Copy(faces[0]).Resize(ProcessedImageWidth, ProcessedImageHeight, Inter.Cubic);
                Faces.Add(processedImage.Mat);
                IDs.Add(userId);
                ScanCounter++;
                OutputBox.AppendText($"{ScanCounter} Successful Scans Taken...{Environment.NewLine}");
                OutputBox.ScrollToCaret();
            }
        }
    }
    else
    {
        // Train on the face and ID collections just captured, then persist the model to YMLPath.
        FaceRecognition.Train(new VectorOfMat(Faces.ToArray()), new VectorOfInt(IDs.ToArray()));
        FaceRecognition.Write(YMLPath);
        Timer.Stop();
        TimerCounter = 0;
        //IDBox.Clear();
        nameBox.Clear();
        TrainButton.Enabled = !TrainButton.Enabled;
        //IDBox.Enabled = !IDBox.Enabled;
        nameBox.Enabled = !nameBox.Enabled;
        OutputBox.AppendText($"Training Complete! {Environment.NewLine}");
        //MessageBox.Show("Training Complete");
        doneTraining = true;

        // Switch to the recognition loop.
        Timer = new Timer();
        Timer.Interval = 500;      // ticks every 0.5 sec
        Timer.Tick += Timer_Tick1; // this handler runs every time the timer fires
        Timer.Start();
    }
}
private void Webcam_ImageGrabbed(object sender, EventArgs e)
{
    currentPeople = new List<Person>();
    Webcam.Retrieve(Frame);
    var imageFrame = Frame.ToImage<Bgr, byte>();
    if (imageFrame != null)
    {
        var grayFrame = imageFrame.Convert<Gray, byte>();
        var faces = FaceDetection.DetectMultiScale(grayFrame, 1.3, 5);
        //var eyes = EyeDetection.DetectMultiScale(grayFrame, 1.3, 5);
        if ((flags & (byte)~Flags.FACE_SQUARE) != 0 && faces.Count() != 0)
        {
            foreach (var face in faces)
            {
                var processedImage = grayFrame.Copy(face).Resize(ProcessedImageWidth, ProcessedImageHeight, Emgu.CV.CvEnum.Inter.Cubic);
                try
                {
                    var result = FaceRecognition.Predict(processedImage);
                    //var id = CheckRecognizeResults(result, _threshold);
                    var recognisedPerson = CheckRecognizeResults(result, _threshold);
                    // Person objects are not stored across restarts, so an unrecognised label has no further info.
                    var personNameIfFound = recognisedPerson == null ? "Spooky ghost no. " + result.Label.ToString() : recognisedPerson.Name;
                    imageFrame.Draw(personNameIfFound, face.Location, FontFace.HersheyTriplex, 1.0, new Bgr(Color.Chartreuse));
                }
                catch (Exception ex)
                {
                    Console.WriteLine("Message: " + ex.Message + " Data: " + ex.Data);
                }
                imageFrame.Draw(face, new Bgr(Color.BurlyWood), 3);
            }
        }
        /*if (EyeSquare)
        {
            foreach (var eye in eyes)
            {
                imageFrame.Draw(eye, new Bgr(Color.Yellow), 3);
            }
        }*/
        picLiveFeed.Image = imageFrame.ToBitmap();
    }
}
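CheckRecognizeResults is called here and in recognizeButton_Click but is never shown. A plausible sketch, assuming it rejects matches whose Eigenface distance exceeds the threshold and looks the label up in an in-memory list of known people (the knownPeople field and the Person lookup are hypothetical):

// Hypothetical helper: accept the prediction only when the distance beats the threshold.
private Person CheckRecognizeResults(FaceRecognizer.PredictionResult result, double threshold)
{
    // Label -1 means the recognizer produced no match at all.
    if (result.Label == -1 || result.Distance > threshold)
    {
        return null;
    }

    // Assumed lookup from numeric label to a stored Person.
    return knownPeople.FirstOrDefault(p => p.Id == result.Label);
}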
private Image<Bgr, byte> TrainFace(Image<Bgr, byte> imageFrame)
{
    try
    {
        if (imageFrame == null)
        {
            return null;
        }
        var grayFrame = imageFrame.Convert<Gray, byte>();
        var faces = FaceDetection.DetectMultiScale(grayFrame, 1.2, 5);
        if (faces.Count() == 1)
        {
            imageFrame.Draw(faces[0], new Bgr(Color.BurlyWood), 3);
            _qz.TRFaces.WaitCycle++;
            if (_qz.TRFaces.WaitCycle < 20)
            {
                return imageFrame;
            }
            _qz.TRFaces.WaitCycle = 0;
            _qz.TRFaces.TrainNbr++;
            var trfImage = grayFrame.Copy(faces[0]).Resize(
                _qz.TRF_IMAGE_WIDTH,
                _qz.TRF_IMAGE_HEIGHT,
                Emgu.CV.CvEnum.Inter.Cubic);
            _qz.TRFaces.TrainFace.Add(trfImage);
            //_qz.FrmTrain.SetPicture(_qz.TRFaces.TrainNbr, trfImage);
        }
        if (_qz.TRFaces.TrainNbr >= 3)
        {
            _qz.Mode = EnMode.None;
            _qz.FormMainUpdate();
        }
    }
    catch (Exception)
    {
    }
    return imageFrame;
}
private void Timer_Tick(object sender, EventArgs e)
{
    WebCam.Retrieve(Frame);
    var imageFrame = Frame.ToImage<Gray, byte>();
    if (TimerCounter < TimeLimit)
    {
        TimerCounter++;
        if (imageFrame != null)
        {
            //imageBox1.Image = imageFrame;
            var faces = FaceDetection.DetectMultiScale(imageFrame, 1.3, 5);
            if (faces.Count() > 0)
            {
                var procImage = imageFrame.Copy(faces[0]).Resize(ProcessImageWidth, ProcessImageHeight, Emgu.CV.CvEnum.Inter.Cubic);
                Faces.Add(procImage);
                //Ids.Add(Convert.ToInt32(tbID.Text));
                //Names.Add(tbName.Text);
                ScanCounter++;
                //tbOutput.AppendText($"{ScanCounter} successful scans taken... {Environment.NewLine}");
                //tbOutput.ScrollToCaret();
            }
        }
    }
    else
    {
        // Train on the captured faces and persist the model.
        FaceRecognition.Train(Faces.ToArray(), Ids.ToArray());
        FaceRecognition.Write(YMLPath);
        Timer.Stop();
        TimerCounter = 0;
        //tbID.Enabled = true;
        //btnBeginTraining.Enabled = true;
        //tbOutput.AppendText($"Training Complete {Environment.NewLine}");
        MessageBox.Show("Training completed");
    }
}
private Image<Bgr, byte> Question(Image<Bgr, byte> imageFrame)
{
    try
    {
        if (imageFrame == null)
        {
            return null;
        }

        // Copy the frame for the detection objects.
        var imgCommit = imageFrame.Copy(); //.Resize(300, 300, Inter.Linear, true);
        imgCommit = imgCommit.Resize(300, 300, Inter.Linear, true);

        // Time elapsed since the question started.
        long timeElapsed = _qz.TQuestion.QStopWatch.ElapsedMilliseconds;
        _qz.TQuestion.QStopWatchTimeDiv = timeElapsed - _qz.TQuestion.QStopWatchTime;
        _qz.TQuestion.QStopWatchTime = timeElapsed;

        // Split the frame into three parts.
        int w = imageFrame.Width / 3;
        int h = imageFrame.Height;

        //imageFrame._EqualizeHist();
        var grayFrame = imageFrame.Convert<Gray, byte>();
        var faces = FaceDetection.DetectMultiScale(grayFrame, 1.3, 5);
        foreach (var face in faces)
        {
            // Face recognition: identify the face and its answer.
            var result = QuestionFaceRecognition0(grayFrame, face);

            // Second pass: match the face to a question/answer region.
            QuestionFaceRecognition1(imageFrame, w, face, result);
        }

        // Question cycle.
        QuestionCycle(imageFrame, imgCommit, w, h);
    }
    catch (Exception)
    {
    }
    return imageFrame;
}
private void Capture_ImageGrabbed(object sender, EventArgs e)
{
    Frame = Capture.QueryFrame();
    var imageFrame = Frame.ToImage<Bgr, byte>();
    if (imageFrame != null)
    {
        var grayFrame = imageFrame.Convert<Gray, byte>();
        var faces = FaceDetection.DetectMultiScale(grayFrame, 1.3, 5);
        foreach (var face in faces)
        {
            CvInvoke.Rectangle(imageFrame, face, new MCvScalar(255, 0, 0, 255), 2);
        }
        imbWebcam.Image = imageFrame;
    }
}
private void Timer_Tick(object sender, EventArgs e)
{
    Frame = Capture.QueryFrame();
    var imageFrame = Frame.ToImage<Gray, byte>();
    if (TimerCounter < TimeLimit)
    {
        TimerCounter++;
        if (imageFrame != null)
        {
            var faces = FaceDetection.DetectMultiScale(imageFrame, 1.3, 5);
            if (faces.Length > 0)
            {
                var processImage = imageFrame.Copy(faces[0]).Resize(ProcessImageWidth, ProcessImageHeight, Inter.Cubic);
                Faces.Add(processImage);
                Labels.Add(Convert.ToInt32(txtUserId.Text));
                ScanCounter++;
                rtbOutPut.AppendText($"{ScanCounter} Successful Scans Taken... {Environment.NewLine}");
                rtbOutPut.ScrollToCaret();
            }
        }
    }
    else
    {
        var trainFaces = ConvertImageToMat(Faces);
        foreach (var face in trainFaces)
        {
            DataStoreAccess.SaveFace(Convert.ToInt32(txtUserId.Text), txtUserName.Text, ConvertImageToBytes(face.Bitmap));
        }
        EigenFaceRecognizer.Train(trainFaces.ToArray(), Labels.ToArray());
        EigenFaceRecognizer.Write(YMLPath);
        Timer.Stop();
        TimerCounter = 0;
        btnPredict.Enabled = true;
        rtbOutPut.AppendText($"Training Completed! {Environment.NewLine}");
        MessageBox.Show("Training Completed!");
    }
}
private Image<Bgr, byte> TestGroup(Image<Bgr, byte> imageFrame)
{
    try
    {
        if (imageFrame == null)
        {
            return null;
        }
        var grayFrame = imageFrame.Convert<Gray, byte>();
        var faces = FaceDetection.DetectMultiScale(grayFrame, 1.2, 5);
        foreach (var face in faces)
        {
            var procImg = grayFrame.Copy(face).Resize(
                _qz.TRF_IMAGE_WIDTH,
                _qz.TRF_IMAGE_HEIGHT,
                Emgu.CV.CvEnum.Inter.Cubic);
            var result = _qz.QZGroupArr[_qz.QZTGroup.TGroupNbr].FaceRecoEigen.Predict(procImg);
            try
            {
                imageFrame.Draw(face, new Bgr(Color.BurlyWood), 3);
                imageFrame.Draw($"{_qz.QZFaceArr[result.Label].Name}", face.Location, Emgu.CV.CvEnum.FontFace.HersheyTriplex, 1.0, new Bgr(Color.Red));
                // if (result.Distance > 1.0)
                //     imageFrame.Draw($"{_qz.QSet.Faces[result.Label].Name}", face.Location, Emgu.CV.CvEnum.FontFace.HersheyTriplex, 1.0, new Bgr(Color.Red));
            }
            catch (Exception)
            {
            }
        }
    }
    catch (Exception)
    {
    }
    return imageFrame;
}
private void btnPredictFace_Click(object sender, EventArgs e)
{
    WebCam.Retrieve(Frame);
    var imageFrame = Frame.ToImage<Gray, byte>();
    if (imageFrame != null)
    {
        var faces = FaceDetection.DetectMultiScale(imageFrame, 1.3, 5);
        if (faces.Count() != 0)
        {
            foreach (var fc in faces)
            {
                var procImg = imageFrame.Copy(fc).Resize(ProcessImageWidth, ProcessImageHeight, Emgu.CV.CvEnum.Inter.Cubic);
                var result = FaceRecognition.Predict(procImg);
                if (result.Label == 1)
                {
                    //tbOutput.AppendText($"FACE {result.Label} {System.Environment.NewLine}");
                    //tbOutput.ScrollToCaret();
                    //imageFrame.Draw("CARL", new Point(faces[0].X, faces[0].Y), 2);
                }
            }
        }
        else
        {
            //tbOutput.AppendText($"????? {System.Environment.NewLine}");
        }
    }
    else
    {
        //tbOutput.AppendText($"????? {System.Environment.NewLine}");
    }
}
private void Timer_Tick(object sender, EventArgs e)
{
    Camera.Retrieve(Frame);
    var ImgFrame = Frame.ToImage<Gray, byte>();
    if (TimerCounter < TimeLimit)
    {
        TimerCounter++;
        if (ImgFrame != null)
        {
            var faces = FaceDetection.DetectMultiScale(ImgFrame, 1.3, 5);
            if (faces.Count() > 0)
            {
                var processedImage = ImgFrame.Copy(faces[0]).Resize(ProcessedImageWidth, ProcessedImageHeight, Emgu.CV.CvEnum.Inter.Cubic);
                Faces.Add(processedImage);
                Ids.Add(Convert.ToInt32(IdBox.Text));
                ScanCounter++;
                OutputBox.AppendText($"{ScanCounter} Successful Scans Taken...{Environment.NewLine}");
                OutputBox.ScrollToCaret();
            }
        }
    }
    else
    {
        FacialRecognition.Train(Faces.ToArray(), Ids.ToArray());
        FacialRecognition.Write(YMLPath);
        Timer.Stop();
        TimerCounter = 0;
        btnTrain.Enabled = !btnTrain.Enabled;
        IdBox.Enabled = !IdBox.Enabled;
        OutputBox.AppendText($"Training Complete! {Environment.NewLine}");
        MessageBox.Show("Training Complete");
    }
}
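None of the training ticks above show how the capture loop starts. A minimal sketch of a button handler that could kick it off, reusing the field and control names from the snippet directly above (the reset logic and the 0.5 s interval are assumptions):

// Hypothetical trigger for Timer_Tick above: reset state, lock the inputs, start sampling.
private void btnTrain_Click(object sender, EventArgs e)
{
    Faces.Clear();
    Ids.Clear();
    ScanCounter = 0;
    TimerCounter = 0;

    btnTrain.Enabled = false;
    IdBox.Enabled = false;

    Timer = new Timer();
    Timer.Interval = 500;     // capture a sample every 0.5 s
    Timer.Tick += Timer_Tick; // the training handler above
    Timer.Start();
}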
private void WebCam_ImageGrabbed(object sender, EventArgs e)
{
    WebCam.Retrieve(Frame);
    var imageFrame = Frame.ToImage<Bgr, byte>();
    if (imageFrame != null)
    {
        var grayFrame = imageFrame.Convert<Gray, byte>();
        var faces = FaceDetection.DetectMultiScale(grayFrame, 1.3, 5);
        foreach (var face in faces)
        {
            imageFrame.Draw(face, new Bgr(Color.BurlyWood), 3);
            try
            {
                var procImg = grayFrame.Copy(face).Resize(ProcessImageWidth, ProcessImageHeight, Emgu.CV.CvEnum.Inter.Cubic);
                if (_qz2018.FrmTrain != null)
                {
                    _qz2018.FrmTrain.SetPicture(1, procImg);
                }
                //imageBox1.Image = procImg;
                var result = FaceRecognition.Predict(procImg);
                imageFrame.Draw($"<{result.Label}>", face.Location, Emgu.CV.CvEnum.FontFace.HersheyTriplex, 1.0, new Bgr(Color.Red));
            }
            catch (Exception ex)
            {
            }
        }
        //imageWebCam.Image = imageFrame;
    }
}