/// <summary>
/// Handles the photo upload: validates the posted content type, stores the
/// file, pixelates every detected face and displays the processed image.
/// </summary>
protected void UploadButton_Click(object sender, EventArgs e)
{
    try
    {
        // NOTE(review): ContentType is client-supplied and spoofable; consider
        // also sniffing the file header before trusting it.
        string contentType = FileUpload1.PostedFile.ContentType;
        bool supported = contentType == "image/jpeg"
                      || contentType == "image/png"
                      || contentType == "image/gif"
                      || contentType == "image/bmp";
        if (!supported)
            throw new InvalidOperationException("Неизвестный тип файла");

        string PhotoFolder = Request.PhysicalApplicationPath + @"\photos\";
        if (!Directory.Exists(PhotoFolder))
            Directory.CreateDirectory(PhotoFolder);

        // Unique name: original base name with the current tick count swapped
        // in as the "extension", then the real extension re-attached.
        string extension = Path.GetExtension(FileUpload1.FileName);
        string uniqueName = Path.ChangeExtension(FileUpload1.FileName, DateTime.Now.Ticks.ToString());
        string upFile = Path.Combine(PhotoFolder, uniqueName + extension);
        FileUpload1.SaveAs(upFile);

        // Face detection. FIX: the original leaked the native cascade and every
        // intermediate image/bitmap; all are now disposed deterministically.
        using (var haarCascade = new HaarCascade(Request.PhysicalApplicationPath + @"\haarcascade_frontalface_alt2.xml"))
        using (var image = new Image<Bgr, Byte>(upFile))
        using (var grayImage = image.Convert<Gray, Byte>())
        using (Bitmap srcImage = image.ToBitmap())
        {
            var detectedFaces = grayImage.DetectHaarCascade(haarCascade)[0];
            foreach (var face in detectedFaces)
            {
                // Pixelation factor chosen empirically so that both large and
                // small faces pixelate equally well.
                double factor = 0.02 + 10.0 / face.rect.Height;
                using (var faceImage = image.Copy(face.rect))
                using (var shrunk = faceImage.Resize(factor, 0))
                using (var pixelated = shrunk.Resize(1 / factor, 0))
                using (Bitmap faceBitmap = pixelated.ToBitmap())
                using (Graphics grD = Graphics.FromImage(srcImage))
                {
                    grD.DrawImage(faceBitmap, new Point(face.rect.Left, face.rect.Top));
                }
            }

            string uniqueName_processed = uniqueName + "_processed";
            srcImage.Save(Path.Combine(PhotoFolder, uniqueName_processed + extension));
            imgTitle.Visible = true;
            Image1.ImageUrl = "photos/" + uniqueName_processed + extension;
        }
    }
    catch (Exception ex)
    {
        // Any failure is surfaced on the error page via session state.
        Session["ErrorMsg"] = ex.Message;
        Response.Redirect("~/error.aspx", true);
    }
}
/// <summary>Loads the face and eye Haar cascades from the Resources folder.</summary>
/// <param name="type">Currently unused by this constructor; kept for interface compatibility.</param>
public EyesDetection(bool type)
{
    this._image = null;
    string resourceDir = currentDirectory + @"\Resources\";
    this._faces = new HaarCascade(resourceDir + "haarcascade_frontalface_alt2.xml");
    this._eyes = new HaarCascade(resourceDir + "haarcascade_eye.xml");
}
// Initialise the webcam capture and the detection cascade when the form loads.
private void frmCamShift_Load(object sender, EventArgs e)
{
    haar = new HaarCascade("haarcascade.xml");
    cap = new Capture(0);       // device 0 = default webcam
    cap.FlipHorizontal = true;  // mirror the preview for the user
}
/// <summary>
/// Converts raw pixels to a grayscale image, detects exactly one face and
/// returns it cropped and resized to <paramref name="outputSize"/>;
/// returns null when the face count is not exactly one.
/// </summary>
public static Image<Gray, byte> DetectAndTrimFace(int[] pixels, Size initialSize, Size outputSize, String haarcascadePath)
{
    // BUG FIX: the height argument was previously initialSize.Width, which
    // corrupted every non-square input before detection even started.
    var inBitmap = ConvertToBitmap(pixels, initialSize.Width, initialSize.Height);
    //for testing purposes I can save the picture to a folder
    //inBitmap.Save(@"E:\data\phototest\received.bmp");

    // FIX: using-blocks dispose the cascade and grayframe on *every* path;
    // the original leaked both on the early "not exactly one face" return.
    using (var haar = new HaarCascade(haarcascadePath))
    using (var grayframe = new Image<Gray, byte>(inBitmap))
    {
        var faces = haar.Detect(grayframe, 1.2, 3, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(30, 30));
        // Require exactly one face so an ambiguous frame is never cropped.
        if (faces.Count() != 1)
        {
            return null;
        }
        var face = faces[0];
        return grayframe.Copy(face.rect).Resize(outputSize.Width, outputSize.Height, INTER.CV_INTER_CUBIC);
    }
}
// Sets up face detection, the AR.Drone video/navigation pipeline and the UI timers.
// NOTE(review): startup order looks deliberate (decoder worker started before the
// drone client) -- confirm before reordering anything here.
public MainForm()
{
    InitializeComponent();
    // Haar cascade used for face detection on decoded video frames.
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    // grabber = new Capture();
    // grabber.QueryFrame();
    // Application.Idle += new EventHandler(FrameGrabber);
    this.WindowState = FormWindowState.Maximized;
    // Decode incoming video packets to BGR24 on a background worker.
    _videoPacketDecoderWorker = new VideoPacketDecoderWorker(PixelFormat.BGR24, true, OnVideoPacketDecoded);
    _videoPacketDecoderWorker.Start();
    // 192.168.1.1 is the AR.Drone's default access-point address.
    _droneClient = new DroneClient("192.168.1.1");
    _droneClient.NavigationPacketAcquired += OnNavigationPacketAcquired;
    _droneClient.VideoPacketAcquired += OnVideoPacketAcquired;
    _droneClient.NavigationDataAcquired += data => _navigationData = data;
    tmrStateUpdate.Enabled = true;
    tmrVideoUpdate.Enabled = true;
    _playerForms = new List<PlayerForm>();
    _videoPacketDecoderWorker.UnhandledException += UnhandledException;
    _droneClient.Start();
}
/// <summary>
/// Loads a hard-coded test image, detects the first face in it and shows
/// both the full image and the cropped face.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    var sourceImage = new Bitmap("C:\\Steve_Wozniak.jpg");
    string haarcascade = "haarcascade_frontalface_default.xml";
    using (HaarCascade face = new HaarCascade(haarcascade))
    {
        var image = new Image<Rgb, Byte>(sourceImage);
        using (var gray = image.Convert<Gray, Byte>())
        {
            var detectedFaces = face.Detect(
                gray,
                1.1,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new System.Drawing.Size(20, 20));

            // BUG FIX: indexing [0] on an empty detection result threw
            // IndexOutOfRangeException; show only the source image instead.
            if (detectedFaces.Length == 0)
            {
                MainImage.Source = ToBitmapSource(sourceImage);
                return;
            }

            var firstFace = detectedFaces[0];
            System.Drawing.Bitmap bmpImage = image.Bitmap;
            System.Drawing.Bitmap bmpCrop = bmpImage.Clone(firstFace.rect, bmpImage.PixelFormat);
            var cropedImage = new Image<Rgb, Byte>(bmpCrop);
            MainImage.Source = ToBitmapSource(sourceImage);
            DetectedFaceImage.Source = ToBitmapSource(cropedImage.Bitmap);
        }
    }
}
/// <summary>
/// Detects faces in <paramref name="image"/>, outlines each one in blue and,
/// when an eye cascade is supplied, detects and draws the eyes inside each face.
/// </summary>
public static Image<Bgr, byte> DetectAndDrawFaces(Image<Bgr, byte> image, HaarCascade face, HaarCascade eye)
{
    // Detection runs on an equalised grayscale copy of the frame.
    Image<Gray, Byte> gray = image.Convert<Gray, Byte>();
    gray._EqualizeHist();

    // Result layout: first index = channel, second = rectangle within that channel.
    MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
        face,
        1.1,
        10,
        Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
        new Size(20, 20));

    foreach (MCvAvgComp detected in facesDetected[0])
    {
        image.Draw(detected.rect, new Bgr(Color.Blue), 2);
        // Optionally search for eyes within the detected face region.
        if (eye != null)
            DetectAndDrawEyes(image, gray, detected, eye);
    }
    return image;
}
/// <summary>
/// Detects the first face in the given frame, stores a normalised 100x100
/// grayscale crop in <c>result</c>, outlines the face and returns the frame.
/// </summary>
public Image<Bgr, Byte> FaceDetection(Image Image)
{
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    Utility UTl = new Utility();
    //Get the current frame form capture device
    Image<Bgr, Byte> currentFrame = UTl.ImageToBgrByte(Image);
    //Convert it to Grayscale
    gray = currentFrame.Convert<Gray, Byte>();
    //Face Detector
    MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(face, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));

    // FIX: test for an empty detection result explicitly. The original
    // indexed [0][0] inside a try/catch, using IndexOutOfRangeException as
    // control flow and reporting a misleading "Camera Error" message when
    // the frame simply contained no face.
    if (facesDetected.Length > 0 && facesDetected[0].Length > 0)
    {
        MCvAvgComp f = facesDetected[0][0];
        result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
        //draw the face detected in the 0th (gray) channel with white color
        currentFrame.Draw(f.rect, new Bgr(Color.White), 2);
    }
    return currentFrame;
}
// Start the webcam and the frame-polling timer when the form loads.
private void Form1_Load(object sender, EventArgs e)
{
    haarCascade1 = new HaarCascade("haarcascade_frontalface_default.xml");
    capturar = new Capture(0);  // default webcam
    timer1.Interval = 40;       // ~25 frames per second
    timer1.Enabled = true;
}
/// <summary>
/// Wires up all cascades, the database connection, recognition result
/// buffers, feature-size limits and the log-file name.
/// </summary>
public Form1()
{
    InitializeComponent();

    // Detection cascades; a second face cascade is kept for the calculation pass.
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    calcface = new HaarCascade("haarcascade_frontalface_default.xml");
    eyeWithGlass = new CascadeClassifier("haarcascade_eye_tree_eyeglasses.xml");
    nose = new CascadeClassifier("haarcascade_mcs_nose.xml");
    mouth = new CascadeClassifier("haarcascade_mcs_mouth.xml");

    mydb = new DBConn();
    recogNameResult = new List<string>();
    recogDistanceResult = new List<double>();

    // Min/max window sizes for each feature detector.
    minEye = new Size(10, 10);
    maxEye = new Size(225, 225);
    minNose = new Size(10, 10);
    maxNose = new Size(225, 225);
    minMouth = new Size(10, 10);
    maxMouth = new Size(225, 225);

    font = new MCvFont(FONT.CV_FONT_HERSHEY_TRIPLEX, 0.5d, 0.5d);

    // Log record name: current timestamp with path-hostile characters stripped.
    logName = DateTime.Now.ToString()
        .Replace("/", "")
        .Replace(":", "")
        .Replace(" ", "");
}
/// <summary>
/// Grabs one webcam frame, runs face detection on it and raises a balloon
/// alert when at least one face (a "shoulder surfer") is found. The elapsed
/// processing time is logged to the console.
/// </summary>
public void ProcessFaces()
{
    this._stopwatch.Reset();
    this._stopwatch.Start();

    // Current frame from the webcam.
    Image<Bgr, Byte> frame = _capture.QueryFrame();
    Image<Gray, Byte> gray;

    using (HaarCascade face = new HaarCascade(FACEFILENAME))
    {
        // Detection works on an equalised grayscale copy (normalised
        // brightness, increased contrast).
        gray = frame.Convert<Gray, Byte>();
        gray._EqualizeHist();

        MCvAvgComp[] facesDetected = face.Detect(
            gray,
            1.1,
            10,
            Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
            new System.Drawing.Size(20, 20));

        bool surferFound = facesDetected != null && facesDetected.Length > 0;
        if (surferFound)
        {
            this.notifyIcon.ShowBalloonTip(5000, "Alert!", "Shoulder surfer detected!", ToolTipIcon.Warning);
            Console.WriteLine("Shoulder surfer detected!");
        }
    }

    this._stopwatch.Stop();
    Console.WriteLine("Elapsed {0}s", this._stopwatch.Elapsed.TotalSeconds);
}
/// <summary>
/// Builds the full-screen slideshow/camera window; exactly one of the two
/// picture boxes stays visible depending on the requested mode.
/// </summary>
public App(Boolean cameraTurn)
{
    InitializeComponent();

    images = slider.Images();
    pictureBox2.Image = images[0];

    if (cameraTurn)
    {
        pictureBox2.Hide();
    }
    else
    {
        cameraState = false;
        pictureBox1.Hide();
    }

    timer = new DispatcherTimer();
    timer.Tick += new EventHandler(timer1_Tick);
    timer.Interval = new TimeSpan(0, 0, 0, 0, 1);  // tick every millisecond
    timer.Start();

    timer2.Interval = 1000;
    timer2.Start();

    // Stretch the picture box over the whole form, then enter full screen
    // so its contents fill the display.
    pictureBox1.Dock = DockStyle.Fill;
    fullScreen.EnterFullScreen(this);
    //pictureBox1.Hide();

    capture = new Capture();
    haarCascade = new HaarCascade(@"haarcascade_frontalface_alt_tree.xml");
}
/// <summary>
/// Loads the face-detection cascade and any previously trained face images
/// with their labels from the TrainedFaces folder.
/// </summary>
public FrmPrincipal()
{
    InitializeComponent();

    // Cascade used for face detection.
    face = new HaarCascade("haarcascade_frontalface_default.xml");

    try
    {
        // TrainedLabels.txt layout: "<count>%<label1>%<label2>%..."
        string[] Labels = File.ReadAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt").Split('%');
        NumLabels = Convert.ToInt16(Labels[0]);
        ContTrain = NumLabels;

        // Training images are stored as face1.bmp .. faceN.bmp.
        for (int tf = 1; tf <= NumLabels; tf++)
        {
            string LoadFaces = "face" + tf + ".bmp";
            trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "/TrainedFaces/" + LoadFaces));
            labels.Add(Labels[tf]);
        }
    }
    catch (Exception)
    {
        // Database is still empty: nothing to load yet.
        MessageBox.Show(" Database Masih Kosong !!! ", "Maaf", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
    }
}
/// <summary>
/// Manual-training form: loads cascades, the recogniser and the database
/// connection, then resolves the installation folder from setting.txt
/// (prompting the user on first run).
/// </summary>
public FormManualTrain(Form1 frm1)
{
    InitializeComponent();
    browseImage = new OpenFileDialog();
    _form1 = frm1;
    eigenRecog = new Classifier_Train();
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    eyeWithGlass = new CascadeClassifier("haarcascade_eye_tree_eyeglasses.xml");
    mydb = new DBConn();
    minEye = new Size(10, 10);
    maxEye = new Size(225, 225);
    font = new MCvFont(FONT.CV_FONT_HERSHEY_TRIPLEX, 0.5d, 0.5d);

    // BUG FIX: File.ReadAllText never returns null -- it throws when the file
    // is missing, so the folder-picker branch below was unreachable and a
    // missing setting.txt crashed the form. Check for existence instead.
    if (File.Exists("setting.txt"))
    {
        folderPath = File.ReadAllText("setting.txt");
    }
    else
    {
        // First run: ask the user for the installation path and persist it.
        FolderBrowserDialog b = new FolderBrowserDialog();
        b.Description = "Please select your installation path";
        DialogResult r = b.ShowDialog();
        if (r == DialogResult.OK) // Test result.
        {
            folderPath = b.SelectedPath;
            Console.WriteLine(folderPath);
            File.WriteAllText(@"setting.txt", folderPath);
            MessageBox.Show("Path is at " + folderPath);
        }
    }
}
/// <summary>
/// Admin view: wires the webcam grabber and the pictures view-model into the
/// window's Loaded/Unloaded lifecycle.
/// </summary>
public Admin()
{
    InitializeComponent();
    face = new HaarCascade("haarcascade_frontalface_default.xml");

    Loaded += (s, e) =>
    {
        this.DataContext = CommonData.PicturesVM;
        if (grabber != null)
        {
            grabber.Start();
        }
        else
        {
            // First activation: restore saved data and open the camera.
            CommonData.LoadSavedData();
            // Remember how many faces are already on record.
            _countFaces = CommonData.PicturesVM.Pictures.Count;
            grabber = new Capture();
            grabber.QueryFrame();
            grabber.Start();
        }
    };

    Unloaded += (s, e) => grabber.Stop();

    CompositionTarget.Rendering += CompositionTarget_Rendering;
}
/// <summary>
/// Detects the first face in <paramref name="faceImage"/> and returns it as
/// a 150x150 grayscale bitmap, or null when no face is found.
/// </summary>
private Bitmap DetectFace(Bitmap faceImage)
{
    var haarCascadeFilePath = _httpContext.Server.MapPath("haarcascade_frontalface_default.xml");

    // FIX: the cascade and intermediate images wrap native OpenCV memory and
    // were never disposed -- a per-request leak in a web context.
    using (var image = new Image<Bgr, byte>(faceImage))
    using (var gray = image.Convert<Gray, Byte>())
    using (var face = new HaarCascade(haarCascadeFilePath))
    {
        MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(face, 1.1, 10, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));

        Image<Gray, byte> result = null;
        foreach (MCvAvgComp f in facesDetected[0])
        {
            //draw the face detected in the 0th (gray) channel with blue color
            image.Draw(f.rect, new Bgr(Color.Blue), 2);
            result = image.Copy(f.rect).Convert<Gray, byte>();
            break;  // only the first detection is used
        }

        if (result != null)
        {
            result = result.Resize(150, 150, INTER.CV_INTER_CUBIC);
            return result.Bitmap;
        }
        return null;
    }
}
/// <summary>
/// Loads the face-detection cascade and any previously trained faces plus
/// their labels from the TrainedFaces folder.
/// </summary>
public FrmPrincipal()
{
    InitializeComponent();

    // Cascade for Viola-Jones face detection (eye cascade currently unused).
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    //eye = new HaarCascade("haarcascade_eye.xml");

    try
    {
        // TrainedLabels.txt layout: "<count>%<label1>%<label2>%..."
        string[] Labels = File.ReadAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt").Split('%');
        NumLabels = Convert.ToInt16(Labels[0]);
        ContTrain = NumLabels;

        // Training images are stored as face1.bmp .. faceN.bmp.
        for (int tf = 1; tf <= NumLabels; tf++)
        {
            string LoadFaces = "face" + tf + ".bmp";
            trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "/TrainedFaces/" + LoadFaces));
            labels.Add(Labels[tf]);
        }
    }
    catch (Exception)
    {
        //MessageBox.Show(e.ToString());
        MessageBox.Show("Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).", "Triained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
    }
}
/// <summary>
/// Loads the face-detection cascade and any previously trained faces plus
/// their labels from the TrainedFaces folder.
/// </summary>
public FrmPrincipal()
{
    InitializeComponent();

    // Cascade used for face detection.
    face = new HaarCascade("haarcascade_frontalface_default.xml");

    try
    {
        // TrainedLabels.txt layout: "<count>%<label1>%<label2>%..."
        var Labels = File.ReadAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt").Split('%');
        NumLabels = Convert.ToInt16(Labels[0]);
        ContTrain = NumLabels;

        // Training images are stored as face1.bmp .. faceN.bmp.
        for (var tf = 1; tf <= NumLabels; tf++)
        {
            var LoadFaces = "face" + tf + ".bmp";
            trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "/TrainedFaces/" + LoadFaces));
            labels.Add(Labels[tf]);
        }
    }
    catch (Exception)
    {
        // Empty training database: prompt the user to add a face first.
        MessageBox.Show(@"Nu este nimic in baza de data, adauga cel putin o fata.", @"Triained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
    }
}
/// <summary>
/// Loads the face and eye cascades plus any previously trained faces and
/// labels from the TrainedFaces2 folder.
/// </summary>
public DaftarPeg()
{
    InitializeComponent();

    face = new HaarCascade("haarcascade_frontalface_default.xml");
    eye = new HaarCascade("haarcascade_eye.xml");

    try
    {
        // TrainedLabelsAll.txt layout: "<count>%<label1>%<label2>%..."
        string[] Labels = File.ReadAllText(Application.StartupPath + "/TrainedFaces2/TrainedLabelsAll.txt").Split('%');
        NumLabels = Convert.ToInt16(Labels[0]);
        ContTrain = NumLabels;

        for (int tf = 1; tf <= NumLabels; tf++)
        {
            // NOTE(review): this loads face_11.bmp, face_12.bmp, ... --
            // confirm "face_1" + tf is intended rather than "face_" + tf.
            string LoadFaces = "face_1" + tf + ".bmp";
            trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "/TrainedFaces2/" + LoadFaces));
            labels.Add(Labels[tf]);
        }
    }
    catch (Exception e)
    {
        MessageBox.Show(e.ToString());
        //MessageBox.Show("Persiapan untuk memasukkan Data Wajah " + NIP_Enroll + ".", "Load Face Training", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
    }
}
/// <summary>
/// Loads the face cascade and lets the mouse wheel nudge whichever numeric
/// textbox currently has focus (0.01 steps, 1 steps, 10 steps respectively).
/// </summary>
public Form1()
{
    InitializeComponent();
    face = new HaarCascade("haarcascade_frontalface_default.xml");

    // FIX: TryParse instead of Parse so non-numeric textbox content no
    // longer throws FormatException on every wheel tick.
    this.MouseWheel += (a, b) =>
    {
        if (textBox1.Focused)
        {
            double d;
            if (double.TryParse(textBox1.Text, out d))
            {
                d += b.Delta > 0 ? 0.01 : -0.01;
                textBox1.Text = d.ToString();
            }
        }
        if (textBox2.Focused)
        {
            int d;
            if (int.TryParse(textBox2.Text, out d))
            {
                d += b.Delta > 0 ? 1 : -1;
                textBox2.Text = d.ToString();
            }
        }
        if (textBox3.Focused)
        {
            int d;
            if (int.TryParse(textBox3.Text, out d))
            {
                d += b.Delta > 0 ? 10 : -10;
                textBox3.Text = d.ToString();
            }
        }
    };
}
/// <summary>Loads the Viola-Jones cascades for face and eye detection.</summary>
public Form1()
{
    InitializeComponent();
    eye = new HaarCascade("haarcascade_eye.xml");
    face = new HaarCascade("haarcascade_frontalface_default.xml");
}
// Lets the user pick a Haar cascade XML file and loads it immediately.
private void btnBrowseHAAR_Click(object sender, EventArgs e)
{
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        txtCascadeFile.Text = openFileDialog1.FileName;
        face = new HaarCascade(txtCascadeFile.Text);
    }
}
/// <summary>Resets all working images and loads the frontal-face cascade.</summary>
public FaceDetection()
{
    this._image = null;
    this._gray = null;
    this._egray = null;
    this._processedImages = null;
    string cascadePath = currentDirectory + @"\Resources\haarcascade_frontalface_alt_tree.xml";
    this._faces = new HaarCascade(cascadePath);
}
/// <summary>
/// Worker thread that detects faces on live camera frames of the given size.
/// </summary>
public LiveStreamFaceDetectingThread(Size frame_size)
    : base()
{
    haarcascade = new HaarCascade(FRONTAL_FACE_HAARCASCADE_FILE_PATH);
    frame_width = frame_size.Width;
    frame_height = frame_size.Height;
    WORK_DONE = false;
}
// Open the first webcam and load the face cascade when the form loads.
private void Form1_Load(object sender, EventArgs e)
{
    _cap = new Capture(0);  // index 0 selects the zeroth webcam
    // The cascade XML must be reachable from the working directory.
    _haar = new HaarCascade("haarcascade_frontalface_alt2.xml");
}
// Turn on the camera, start the frame timer and load the face cascade.
private void FrmPrincipal_Load(object sender, EventArgs e)
{
    haar = new HaarCascade("haarcascade_frontalface_default.xml");
    capturar = new Capture(0);  // default camera
    timer1.Interval = 40;       // ~25 frames per second
    timer1.Enabled = true;
}
/// <summary>
/// Starts a background face-tracking thread over <paramref name="image"/>
/// using the supplied cascade.
/// </summary>
public FaceTrackingInstance(ImageRGB image, HaarCascade cascade)
{
    FSource = image;
    FHaarCascade = cascade;
    // FIX: mark the instance as running *before* the worker starts, so the
    // thread cannot observe IsRunning == false during startup (the original
    // set it only after Thread.Start()).
    IsRunning = true;
    FTrackingThread = new Thread(fnFindFacesThread);
    FTrackingThread.Start();
}
// Open the camera, load the cascade and start the frame-refresh timer.
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    haarCascade = new HaarCascade(@"haarcascade_frontalface_alt_tree.xml");
    capture = new Capture();

    timer = new DispatcherTimer();
    timer.Interval = new TimeSpan(0, 0, 0, 0, 1);  // tick every millisecond
    timer.Tick += timer_Tick;
    timer.Start();
}
/// <summary>
/// Worker thread that scans recorded frames of the given size for
/// perpetrator faces; all counters start in their idle state.
/// </summary>
public ReviewFaceDetectingThread(Size frame_size)
    : base()
{
    haarcascade = new HaarCascade(FRONTAL_FACE_HAARCASCADE_FILE_PATH);
    this.frame_size = frame_size;
    its_time_to_pick_perpetrator_faces = false;
    WORK_DONE = false;
    counter = 0;
    previous_id = 0;
}
/// <summary>
/// Runs Haar face detection over <paramref name="grayImage"/> and returns
/// the detected regions.
/// </summary>
protected virtual MCvAvgComp[] GetFacesVector(Image<Gray, byte> grayImage)
{
    // FIX: the cascade wraps a native resource and was created per call
    // without ever being disposed; the using-block stops the leak.
    // (The original's parameter comments were also off by one line.)
    using (var haarCascade = new HaarCascade(this.HaarCascadePath))
    {
        return haarCascade.Detect(
            grayImage,                            // the image the objects are detected in
            this.ScanFactor,                      // factor by which the window is scaled in subsequent scans
            this.Neighbours,                      // min number of neighbour rectangles per detection
            HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
            Size.Empty, Size.Empty);
    }
}
/// <summary>
/// Initialises the webcam and the detection cascades; any startup failure
/// is reported to the user instead of crashing the form.
/// </summary>
public FrmFaceDetection()
{
    try
    {
        InitializeComponent();
        webcamVideo = new Capture();
        //faceDetection = new HaarCascade(Application.StartupPath + "\\haarcascade_frontalface_default.xml");
        faceDetection = new Emgu.CV.HaarCascade("haarcascade_frontalface_default.xml");
        cascade = new FaceHaarCascade();
    }
    catch (Exception er)
    {
        MessageBox.Show(er.Message, "Face Detection and Recognition Failure", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
/// <summary>
/// Builds a detector for <paramref name="image"/>; the "\0" sentinel in
/// <paramref name="full_name_of_xml"/> selects the bundled default cascade.
/// </summary>
public static EmguCVFaceDetector Get(Bitmap image, string full_name_of_xml)
{
    var colourImage = new Image<Bgr, Byte>(image);
    var grayImage = colourImage.Convert<Gray, Byte>();

    string cascadeFile = full_name_of_xml == "\0"
        ? "haarcascade_frontalface_alt.xml"
        : full_name_of_xml;

    return new EmguCVFaceDetector(grayImage, colourImage, new Emgu.CV.HaarCascade(cascadeFile));
}
/// <summary>Stores the gray/colour images and the cascade used for detection.</summary>
public EmguCVFaceDetector(Image<Gray, Byte> grayimage1, Image<Bgr, Byte> image1, Emgu.CV.HaarCascade faceCascade1)
{
    this.grayimage = grayimage1;
    this.image = image1;
    this.faceCascade = faceCascade1;
}