Example #1
        private void ProcessNewImageFromDevice(Image inputImage, DeviceType device, ImageChangedEventArgs imgArgs)
        {
            if (DeviceType.Scanner == device)
            {
                //ConfigurationManager.RefreshSection("appSettings");
                //var ScanJpg = (ConfigurationManager.AppSettings["ScanJpgPath"]);
                //if (ScanJpg != "")
                //{
                //    try
                //    {
                //        var temp = Image.FromFile(ScanJpg);
                //        inputImage = temp;
                //    }
                //    catch
                //    {
                //        logger.Info("ScanJpg FILE NOT FOUND");
                //    }
                //    logger.Info("ScanJpgPath in config not null!We CHANGE analyze image!!!");
                //}
            }
            var  baseImg     = (Image)inputImage.Clone();
            bool isFaceFound = false;
            Image <Bgr, Byte> originalFrame = new Image <Bgr, Byte>(new Bitmap((Image)inputImage.Clone())); // keep a copy of the original picture

            if (device == DeviceType.Scanner)
            {
                var   errs = new List <AnalyzeImageResultType>();
                float face_width, face_height, face_delta_min, face_delta_max;
                int   dpi;
                try
                {
                    face_width     = float.Parse(ConfigurationManager.AppSettings["face_width"], System.Globalization.CultureInfo.InvariantCulture);
                    face_height    = float.Parse(ConfigurationManager.AppSettings["face_height"], System.Globalization.CultureInfo.InvariantCulture);
                    face_delta_min = float.Parse(ConfigurationManager.AppSettings["face_delta_min"], System.Globalization.CultureInfo.InvariantCulture);
                    face_delta_max = float.Parse(ConfigurationManager.AppSettings["face_delta_max"], System.Globalization.CultureInfo.InvariantCulture);
                    dpi            = int.Parse(ConfigurationManager.AppSettings["ScannerDpi"]);
                }
                catch
                {
                    //Console.WriteLine("Не смог считать параметры face_wigth и.т.д.");
                    logger.Error("Не смог считать параметры face_wigth и.т.д.");
                    face_width     = 2.0f;
                    face_height    = 2.6f;
                    face_delta_min = 0.2f;
                    face_delta_max = 0.9f;
                    dpi            = 300;
                }
                double minRectangleSquare = face_width * face_height * face_delta_min;
                double maxRectangleSquare = face_width * face_height * face_delta_max;
                originalFrame = new Image <Bgr, Byte>(new Bitmap((Image)inputImage.Clone()));
                inputImage.Save("Crop" + DateTime.Now.ToString("yyyy-MM-dd-hh-mm") + ".jpg");
                var cfg = string.IsNullOrEmpty(ConfigurationManager.AppSettings["UsePythonForFindFaceInScan"]) ? true : bool.Parse(ConfigurationManager.AppSettings["UsePythonForFindFaceInScan"]);
                if (!cfg)
                {
                    var Result    = FindFace(inputImage);
                    int GoodFaces = Result.GoodFacesFound();
                    if (GoodFaces == 1)
                    {
                        //need doc detector before invoke
                        inputImage = CheckFramePosition(inputImage);
                        AnalyzeCompleted(this, new AnalyzeCompletedEventArgs(device, new List <AnalyzeImageResultType>(), inputImage, Result.faceImage));
                        return;
                    }
                    else
                    {
                        if (GoodFaces == 0)
                        {
                            AnalyzeImage(this, new AnalyzeCompletedEventArgs(device, new List <AnalyzeImageResultType> {
                                AnalyzeImageResultType.FaceNotFound
                            }));
                            return;
                        }
                        else
                        {
                            for (int i = 0; i < GoodFaces; i++)
                            {
                                Result.RList[i].faceImage.Save("GoodFace_" + i + DateTime.Now.ToString("yyyy-MM-dd-hh-mm") + ".jpg");
                            }
                            AnalyzeImage(this, new AnalyzeCompletedEventArgs(device, new List <AnalyzeImageResultType> {
                                AnalyzeImageResultType.MoreThanOnePerson
                            }));
                            return;
                        }
                    }
                    return;
                }
                isFaceFound = true;
            } // end scanner logic
            if (isFinalGoodImageFound == true && device == DeviceType.WebCam)
            {   // webcam processing has already finished
                //Console.WriteLine("Good final image found. We will not process more images.");
                logger.Info("Good final image found.  We will not process more images.");
                return; // nothing left to do
            }
            currentFrame = originalFrame;

            isFaceFound = true;
            FaceDetectorProcessor fdp = new FaceDetectorProcessor(currentFrame);

            Rectangle[] faceFoundRect = fdp.isFaceDetected();
            if (faceFoundRect != null && faceFoundRect.Length == 1)
            {
                //isFaceFound = true;
                // draw the face bounding box on the output image
                using (Graphics g = Graphics.FromImage(inputImage))
                {
                    Rectangle rec = faceFoundRect[0];

                    var scaleX = getScale(baseImg.Height, inputImage.Height);
                    var scaleY = getScale(baseImg.Width, inputImage.Width);

                    rec.X      = Convert.ToInt32(rec.X * scaleX);
                    rec.Y      = Convert.ToInt32(rec.Y * scaleY);
                    rec.Height = Convert.ToInt32(rec.Height * scaleY);
                    rec.Width  = Convert.ToInt32(rec.Width * scaleX);

                    g.DrawRectangle(new Pen(Color.LimeGreen, 5.0f), rec);
                }

                //inputImage.Save(string.Format("D:\\\\qwe {0}.png", DateTime.Now.Ticks));
            }
            // Drop the reference so the garbage collector can reclaim the detector;
            // a new one is created on the next iteration anyway.
            fdp = null;
            if (faceFoundRect != null && faceFoundRect.Length > 1)
            {
                // more than one face was detected
                var Error = new List <AnalyzeImageResultType>();
                Error.Add(AnalyzeImageResultType.MoreThanOnePerson);
                AnalyzeImage(this, new AnalyzeCompletedEventArgs(device, Error, baseImg, null));
                //Console.WriteLine("More than 2 faces found in CV in c#!");
                logger.Info("More than 2 faces found in CV in c#!");
                return;
            }
            if (isFaceFound && backgoundThread_ImageAnalizer.IsBusy == false && isFinalGoodImageFound == false)
            {
                var dto = new ImgAnalyzerDto
                {
                    BaseImage    = baseImg,
                    Device       = device,
                    Img          = originalFrame,
                    isCamStopped = imgArgs.СamStopped,
                    ImgFilePath  = imgArgs.ImgPath
                };
                logger.Info("START THREAD TO ANALIZE IN PYTHON !");
                backgoundThread_ImageAnalizer.RunWorkerAsync(dto);
            }
            else
            {
                // custom handling for the webcam
                if (device == DeviceType.WebCam && imgArgs.СamStopped && backgoundThread_ImageAnalizer.IsBusy == false)
                {
                    if (webCamErrors.ToList().Count == 0)
                    {
                        webCamErrors.Add(AnalyzeImageResultType.FaceNotFound);
                    }

                    AnalyzeCompleted(this, new AnalyzeCompletedEventArgs(device, webCamErrors.ToList(), inputImage, null));

                    return;
                }

                AnalyzeImage(this, new AnalyzeCompletedEventArgs(device, new List <AnalyzeImageResultType>()
                {
                    AnalyzeImageResultType.FaceNotFound
                }, inputImage, null));

                if (!webCamErrors.Contains(AnalyzeImageResultType.FaceNotFound))
                {
                    webCamErrors.Add(AnalyzeImageResultType.FaceNotFound);
                }
            }

            //logger.Info("RunParralelFrameGrabber exiting");
        }
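
The background worker in this example receives an ImgAnalyzerDto built with an object initializer. The class itself is not shown; the sketch below is a hypothetical reconstruction whose property names are taken from the initializer above, with the types inferred (an assumption) from the values assigned to them.

        // Hypothetical sketch of the DTO handed to backgoundThread_ImageAnalizer above.
        // Property names come from the object initializer in the example; the types are
        // assumptions inferred from the assigned values.
        // (Assumes the example's usings: System.Drawing, Emgu.CV, Emgu.CV.Structure.)
        internal class ImgAnalyzerDto
        {
            public Image            BaseImage    { get; set; } // clone of the incoming frame
            public DeviceType       Device       { get; set; } // Scanner or WebCam
            public Image<Bgr, Byte> Img          { get; set; } // Emgu CV frame passed to the analyzer
            public bool             isCamStopped { get; set; } // copied from imgArgs.СamStopped
            public string           ImgFilePath  { get; set; } // copied from imgArgs.ImgPath
        }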
Example #2
        private void FrameGrabber_Parrellel(Image inputImage)
        {
            if (isFinalImageFound == true)
            {
                Console.WriteLine("JOB DONE");
                return;
            }

            //Get the current frame from the capture device
            _counter++;

            Image <Bgr, Byte> originalFrame = new Image <Bgr, Byte>(new Bitmap(inputImage));

            currentFrame = originalFrame;

            // See if we have face, process is fast, sync!
            FaceDetectorProcessor fdp = new FaceDetectorProcessor(currentFrame);
            bool isFaceFound          = fdp.isFaceDetected();

            Console.WriteLine("Face detected: " + isFaceFound.ToString() + "   Python is busy: " + bwImageAnalizer.IsBusy.ToString());

            if (isFaceFound == true)
            {
                if (bwImageAnalizer.IsBusy == false)
                {
                    bwImageAnalizer.RunWorkerAsync(originalFrame);
                }
            }


            /*
             * //Convert it to Grayscale
             * //Clear_Faces_Found();
             *
             * if (currentFrame != null)
             * {
             *  gray_frame = currentFrame.Convert<Gray, Byte>();
             *  //Face Detector
             *  Rectangle[] facesDetected = Face.DetectMultiScale(gray_frame, 1.2, 10, new Size(50, 50), Size.Empty);
             *
             *  //Action for each element detected
             *  for (int i = 0; i < facesDetected.Length; i++)
             *  {
             *      try
             *      {
             *          facesDetected[i].X += (int)(facesDetected[i].Height * 0.15);
             *          facesDetected[i].Y += (int)(facesDetected[i].Width * 0.22);
             *          facesDetected[i].Height -= (int)(facesDetected[i].Height * 0.3);
             *          facesDetected[i].Width -= (int)(facesDetected[i].Width * 0.35);
             *
             *          result = currentFrame.Copy(facesDetected[i]).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
             *          result._EqualizeHist();
             *          //draw the face detected in the 0th (gray) channel with blue color
             *          currentFrame.Draw(facesDetected[i], new Bgr(Color.Red), 2);
             *
             *          if (Eigen_Recog.IsTrained)
             *          {
             *              string name = Eigen_Recog.Recognise(result);
             *              int match_value = (int)Eigen_Recog.Get_Eigen_Distance;
             *
             *              //Draw the label for each face detected and recognized
             *              currentFrame.Draw(name + " ", ref font, new Point(facesDetected[i].X - 2, facesDetected[i].Y - 2), new Bgr(Color.LightGreen));
             *              ADD_Face_Found(result, name, match_value);
             *          }
             *
             *      }
             *      catch
             *      {
             *          //do nothing as parrellel loop buggy
             *          //No action as the error is useless, it is simply an error in
             *          //no data being there to process and this occurss sporadically
             *      }
             *  };
             *  //Show the faces procesed and recognized
             *  image_PICBX.Image = currentFrame.ToBitmap();
             * }*/
        }
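
Example #2 only queues a frame with bwImageAnalizer.RunWorkerAsync(originalFrame) when the worker is idle. The worker itself is not part of the example; the sketch below shows one plausible way such a BackgroundWorker could be wired up, with the analysis body left as a placeholder. The InitImageAnalizer name is hypothetical.

        // Hypothetical wiring for the bwImageAnalizer worker used above
        // (assumes using System.ComponentModel, Emgu.CV and Emgu.CV.Structure).
        private readonly BackgroundWorker bwImageAnalizer = new BackgroundWorker();

        private void InitImageAnalizer()
        {
            bwImageAnalizer.DoWork += (sender, e) =>
            {
                // The frame passed to RunWorkerAsync arrives as e.Argument.
                var frame = (Image<Bgr, Byte>)e.Argument;
                // ... run the (slow) analysis off the UI thread here ...
                e.Result = frame;
            };

            bwImageAnalizer.RunWorkerCompleted += (sender, e) =>
            {
                // Back on the calling thread: publish the result and, if it is good enough,
                // set isFinalImageFound so FrameGrabber_Parrellel stops queuing new frames.
            };
        }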
Example #3
        private FindResult FindFace(Image InputImage)
        {
            int   counter = 0;
            float face_width, face_height, face_delta_min, face_delta_max;
            int   dpi;

            try
            {
                face_width     = float.Parse(ConfigurationManager.AppSettings["face_width"], System.Globalization.CultureInfo.InvariantCulture);
                face_height    = float.Parse(ConfigurationManager.AppSettings["face_height"], System.Globalization.CultureInfo.InvariantCulture);
                face_delta_min = float.Parse(ConfigurationManager.AppSettings["face_delta_min"], System.Globalization.CultureInfo.InvariantCulture);
                face_delta_max = float.Parse(ConfigurationManager.AppSettings["face_delta_max"], System.Globalization.CultureInfo.InvariantCulture);
                dpi            = int.Parse(ConfigurationManager.AppSettings["ScannerDpi"]);
            }
            catch
            {
                //Console.WriteLine("Не смог считать параметры face_wigth и.т.д.");
                logger.Error("Не смог считать параметры face_wigth и.т.д.");
                face_width     = 2.0f;
                face_height    = 2.6f;
                face_delta_min = 0.2f;
                face_delta_max = 0.9f;
                dpi            = 300;
            }
            double minRectangleSquare = face_width * face_height * face_delta_min;
            double maxRectangleSquare = face_width * face_height * face_delta_max;

            int goodFacesFound = 0;
            var Result         = new FindResult(false, null);

            while (counter <= 3)
            {
                FaceDetectorProcessor scanFaceDetectorProcessor = new FaceDetectorProcessor(new Image <Bgr, byte>(new Bitmap(InputImage)));
                Rectangle[]           facesOnScan = scanFaceDetectorProcessor.isFaceDetected();
                goodFacesFound = 0;
                double RectangleSquare = 0;
                // isFaceDetected can return null when nothing is found (see Example #1), so guard against it
                foreach (var item in facesOnScan ?? new Rectangle[0])
                {
                    // face area in cm²: each side is converted from pixels to cm using the scan resolution (dpi)
                    RectangleSquare = ((double)item.Width / Math.Round(InputImage.HorizontalResolution) * 2.54) * ((double)item.Height / Math.Round(InputImage.HorizontalResolution) * 2.54);
                    var croppedFace = DocDetector.CropImage(new Bitmap(InputImage), item);
                    if (checkFaceSquare(RectangleSquare, minRectangleSquare, maxRectangleSquare))
                    {
                        croppedFace.Save("ScanCropedFaceGood" + DateTime.Now.ToString("yyyy-MM-dd-hh-mm") + ".jpg");
                        Result.update(true, croppedFace);
                        goodFacesFound++;
                    }
                    else
                    {
                        Result.update(false, croppedFace);
                        croppedFace.Save("ScanCropedFaceBad" + DateTime.Now.ToString("yyyy-MM-dd-hh-mm") + ".jpg");
                    }
                }
                if (goodFacesFound == 1)
                {
                    return(Result);
                    //return new FindResult(true,)
                }
                InputImage.RotateFlip(RotateFlipType.Rotate90FlipNone);
                scanFaceDetectorProcessor = null;
                facesOnScan = null;
                counter++;
                System.Windows.Forms.Application.DoEvents();
            }
            return(Result);
        }
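
FindFace returns a FindResult, which is not defined in these examples. The container below is a hypothetical reconstruction based only on the calls made against it here and in Example #1 (the two-argument constructor, update, GoodFacesFound, faceImage and RList); the member layout is an assumption.

        // Hypothetical reconstruction of FindResult, inferred from its call sites
        // (assumes using System.Collections.Generic, System.Drawing and System.Linq).
        public class FindResult
        {
            public class FaceEntry
            {
                public bool  isGood;    // passed the face-size check
                public Image faceImage; // cropped face image
            }

            public List<FaceEntry> RList = new List<FaceEntry>(); // every face found, good or bad
            public Image faceImage;                               // most recent good face

            public FindResult(bool isGood, Image face)
            {
                if (face != null)
                {
                    update(isGood, face);
                }
            }

            // Records one cropped face and remembers the latest good one.
            public void update(bool isGood, Image face)
            {
                RList.Add(new FaceEntry { isGood = isGood, faceImage = face });
                if (isGood)
                {
                    faceImage = face;
                }
            }

            // Number of faces that passed the size check.
            public int GoodFacesFound()
            {
                return RList.Count(f => f.isGood);
            }
        }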