Example #1
        /// <summary>
        /// Applies the blob extraction feature of AForge
        /// </summary>
        /// <param name="Mask">Mask from the flood-fill step</param>
        /// <param name="Source">Source image (full image)</param>
        /// <returns>A list of tuples (blob bitmap from the mask, matching rectangle cropped from the source image)</returns>
        private static List<Tuple<Bitmap, Bitmap>> ApplyBlobExtractor(Bitmap Mask, Bitmap Source)
        {
            List<Tuple<Bitmap, Bitmap>> BlobSrcblock = new List<Tuple<Bitmap, Bitmap>>();

            log.Info("Using AForge Blob Counter to Process Mask");
            AForge.Imaging.BlobCounter blobCounter = new AForge.Imaging.BlobCounter();

            // Sort order
            blobCounter.ObjectsOrder = AForge.Imaging.ObjectsOrder.XY;
            blobCounter.ProcessImage(Mask);
            AForge.Imaging.Blob[] blobs = blobCounter.GetObjects(Mask, false);

            log.Info("Use the Blob Extraction Results to reverse extract blobs from images");
            // Adding images into the image list
            AForge.Imaging.UnmanagedImage currentImg;
            foreach (AForge.Imaging.Blob blob in blobs)
            {
                Rectangle myRect = blob.Rectangle;
                currentImg = blob.Image;
                Bitmap exBlob = currentImg.ToManagedImage();
                AForge.Imaging.Filters.Crop filter = new AForge.Imaging.Filters.Crop(myRect);
                Bitmap exSrc = filter.Apply(Source);
                BlobSrcblock.Add(new Tuple<Bitmap, Bitmap>(exBlob, exSrc));
            }
            log.Info("Extraction Complete: returning List of ( blob bitmap, src bitmap)");
            return BlobSrcblock;
        }
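
A minimal calling sketch for the method above (not from the original source; maskBitmap and sourceBitmap are illustrative names for the flood-fill mask and the full source image):

        List<Tuple<Bitmap, Bitmap>> pairs = ApplyBlobExtractor(maskBitmap, sourceBitmap);
        foreach (Tuple<Bitmap, Bitmap> pair in pairs)
        {
            Bitmap blobFromMask   = pair.Item1; // blob shape cut out of the mask
            Bitmap cropFromSource = pair.Item2; // same rectangle cropped from the source image
        }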
Example #2
        /// <summary>
        /// Applies the blob extraction feature of AForge
        /// </summary>
        /// <param name="Mask">Mask from the flood-fill step</param>
        /// <param name="Source">Source image (full image)</param>
        /// <returns>A list of tuples (blob bitmap from the mask, matching rectangle cropped from the source image)</returns>
        private static List <Tuple <Bitmap, Bitmap> > ApplyBlobExtractor(Bitmap Mask, Bitmap Source)
        {
            List <Tuple <Bitmap, Bitmap> > BlobSrcblock = new List <Tuple <Bitmap, Bitmap> >();

            log.Info("Using AForge Blob Counter to Process Mask");
            AForge.Imaging.BlobCounter blobCounter = new AForge.Imaging.BlobCounter();

            // Sort order
            blobCounter.ObjectsOrder = AForge.Imaging.ObjectsOrder.XY;
            blobCounter.ProcessImage(Mask);
            AForge.Imaging.Blob[] blobs = blobCounter.GetObjects(Mask, false);

            log.Info("Use the Blob Extraction Results to reverse extract blobs from images");
            // Adding images into the image list
            AForge.Imaging.UnmanagedImage currentImg;
            foreach (AForge.Imaging.Blob blob in blobs)
            {
                Rectangle myRect = blob.Rectangle;
                currentImg = blob.Image;
                Bitmap exBlob = currentImg.ToManagedImage();
                AForge.Imaging.Filters.Crop filter = new AForge.Imaging.Filters.Crop(myRect);
                Bitmap exSrc = filter.Apply(Source);
                BlobSrcblock.Add(new Tuple <Bitmap, Bitmap>(exBlob, exSrc));
            }
            log.Info("Extraction Complete: returning List of ( blob bitmap, src bitmap)");
            return(BlobSrcblock);
        }
Example #3
 public static Bitmap Crop(this Bitmap bitmap, Rectangle cropRect)
 {
     // create filter
     AForge.Imaging.Filters.Crop filter = new AForge.Imaging.Filters.Crop(cropRect);
     // apply the filter
     return(filter.Apply(bitmap));
 }
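
A hedged usage sketch for the Crop extension method above (illustrative file name; assumes System.Drawing is imported and the bitmap is in a pixel format the AForge Crop filter accepts):

     // Hypothetical usage: crop the top-left 100x100 region of a bitmap.
     Bitmap photo   = (Bitmap)Image.FromFile("photo.jpg");
     Bitmap topLeft = photo.Crop(new Rectangle(0, 0, 100, 100));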
Example #4
        /// <summary>
        /// Processes a single sample
        /// </summary>
        /// <param name="r">Row index of the sample</param>
        /// <param name="c">Column index of the sample</param>
        private int processSample(int r, int c)
        {
            //  Invert the image
            AForge.Imaging.Filters.Invert InvertFilter = new AForge.Imaging.Filters.Invert();
            InvertFilter.ApplyInPlace(arrayPics[r, c]);

            //    Create a BlobCounter, pull out the largest blob, scale it, intersect, and keep
            //    the image for exclusive use
            AForge.Imaging.BlobCounterBase bc = new AForge.Imaging.BlobCounter();

            bc.FilterBlobs = true;
            bc.MinWidth    = 3;
            bc.MinHeight   = 3;
            // Order blobs by size
            bc.ObjectsOrder = AForge.Imaging.ObjectsOrder.Size;
            // Process the image

            bc.ProcessImage(arrayPics[r, c]);

            Rectangle[] rects = bc.GetObjectsRectangles();
            if (rects.Length == 0)
            {
                finalPics[r, c] = AForge.Imaging.UnmanagedImage.FromManagedImage(new Bitmap(100, 100));
                return(0);
            }

            // Unfortunately the blob-counting code does not work, so we simply compute the maximum coverage
            // over all blobs - for a multi-digit number, e.g. 16, we may get two regions - one for the 1 and one for the 6.
            // We build a hull that encloses all the blobs. A poor solution; needs rework.
            int lx = arrayPics[r, c].Width;
            int ly = arrayPics[r, c].Height;
            int rx = 0;
            int ry = 0;

            for (int i = 0; i < rects.Length; ++i)
            {
                if (lx > rects[i].X)
                {
                    lx = rects[i].X;
                }
                if (ly > rects[i].Y)
                {
                    ly = rects[i].Y;
                }
                if (rx < rects[i].X + rects[i].Width)
                {
                    rx = rects[i].X + rects[i].Width;
                }
                if (ry < rects[i].Y + rects[i].Height)
                {
                    ry = rects[i].Y + rects[i].Height;
                }
            }

            // Crop the edges, keeping only the central blobs
            AForge.Imaging.Filters.Crop cropFilter = new AForge.Imaging.Filters.Crop(new Rectangle(lx, ly, rx - lx, ry - ly));
            finalPics[r, c] = cropFilter.Apply(arrayPics[r, c]);

            //  Scale to 100x100
            AForge.Imaging.Filters.ResizeBilinear scaleFilter = new AForge.Imaging.Filters.ResizeBilinear(100, 100);
            finalPics[r, c] = scaleFilter.Apply(finalPics[r, c]);

            //  And finally, run the recognition
            currentDeskState[r * 4 + c] = patternMatch(r, c);

            return(0);
        }
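
For reference, the hand-rolled min/max hull computed above can also be expressed with System.Drawing's Rectangle.Union; a minimal sketch (not part of the original code), assuming the same rects array with at least one element:

        // Equivalent bounding hull over all blob rectangles via Rectangle.Union.
        Rectangle hull = rects[0];
        for (int i = 1; i < rects.Length; ++i)
        {
            hull = Rectangle.Union(hull, rects[i]);
        }
        // hull.Left, hull.Top, hull.Right, hull.Bottom correspond to lx, ly, rx, ry above.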
Example #5
        public void ProcessImage(Bitmap bitmap, bool justShow)
        {
            //  "Saw" the image apart into 16 fragments - one separate tile each

            //  The smaller side of the image (usually the height)
            if (bitmap.Height > bitmap.Width)
            {
                throw new Exception("Life never prepared me for such an amusing camera!");
            }
            int side = bitmap.Height;

            //  Saw off the borders, but no more than half of the image
            if (side < 4 * settings.border)
            {
                settings.border = side / 4;
            }
            side -= 2 * settings.border;

            Rectangle cropRect = new Rectangle((bitmap.Width - bitmap.Height) / 2 + settings.border, settings.border, side, side);

            original = new Bitmap(cropRect.Width, cropRect.Height);

            //  Draw the frames on the original
            Graphics g = Graphics.FromImage(original);

            g.DrawImage(bitmap, new Rectangle(0, 0, original.Width, original.Height), cropRect, GraphicsUnit.Pixel);
            Pen p = new Pen(Color.Red);

            //  Check the margin value from settings
            if (side < 10 * settings.margin)
            {
                settings.margin = side / 10;
            }

            //  Compute the side length of the block to extract
            int sz        = side / 4 - 2 * settings.margin;
            int blockSide = side / 4;

            for (int r = 0; r < 4; ++r)
            {
                for (int c = 0; c < 4; ++c)
                {
                    g.DrawRectangle(p, new Rectangle(settings.margin + c * blockSide, settings.margin + r * blockSide, sz, sz));
                }
            }

            //  Draw the digits that were recognized in the previous step?
            if (justShow)
            {
                DrawNumbersOnOriginalBitmap(g, blockSide);
            }

            //  Convert the image to grayscale
            AForge.Imaging.Filters.Grayscale grayFilter = new AForge.Imaging.Filters.Grayscale(0.2125, 0.7154, 0.0721);
            processed = grayFilter.Apply(AForge.Imaging.UnmanagedImage.FromManagedImage(original));

            //  Scale the image to 500x500 - that is enough
            AForge.Imaging.Filters.ResizeBilinear scaleFilter = new AForge.Imaging.Filters.ResizeBilinear(500, 500);
            original = scaleFilter.Apply(original);

            //  If no recognition is planned - just exit
            if (justShow)
            {
                return;
            }

            //  Reset the state
            for (int i = 0; i < 16; ++i)
            {
                currentDeskState[i] = 0;
            }

            //  Apply the threshold filter. The threshold value is taken from the settings and can be changed on the form
            AForge.Imaging.Filters.BradleyLocalThresholding threshldFilter = new AForge.Imaging.Filters.BradleyLocalThresholding();
            threshldFilter.PixelBrightnessDifferenceLimit = settings.differenceLim;
            threshldFilter.ApplyInPlace(processed);

            for (int r = 0; r < 4; ++r)
            {
                for (int c = 0; c < 4; ++c)
                {
                    //  Take the next fragment - with extra padding (we drew rectangles there)
                    AForge.Imaging.Filters.Crop cropFilter = new AForge.Imaging.Filters.Crop(new Rectangle(2 + settings.margin + c * blockSide, 2 + settings.margin + r * blockSide, sz - 4, sz - 4));
                    arrayPics[r, c] = cropFilter.Apply(processed);

                    //arrayPics[r, c] = processed.Clone(new Rectangle(2+settings.margin + c * blockSide, 2+settings.margin + r * blockSide, sz-4, sz-4), processed.PixelFormat);
                    //  And run the matching
                    processSample(r, c);
                }
            }
            DrawNumbersOnOriginalBitmap(g, blockSide);
        }
Example #6
        public int PAConstantSet(Image <Gray, Byte> inputimg)
        {
            int cons = -1;
            int t    = 3; // thickness in pixels


            Rectangle ROI;

            AForge.Imaging.Filters.Crop filter;
            #region 4 ROIs



            ROI    = new Rectangle(0, 0, inputimg.Width, t);
            filter = new AForge.Imaging.Filters.Crop(ROI);
            Image <Gray, Byte> up      = new Image <Gray, Byte>(filter.Apply(inputimg.Bitmap));
            Image <Gray, Byte> up_temp = new Image <Gray, Byte>(filter.Apply(inputimg.Bitmap));

            ROI    = new Rectangle(0, inputimg.Height - t, inputimg.Width, t);
            filter = new AForge.Imaging.Filters.Crop(ROI);
            Image <Gray, Byte> Down      = new Image <Gray, Byte>(filter.Apply(inputimg.Bitmap));
            Image <Gray, Byte> Down_temp = new Image <Gray, Byte>(filter.Apply(inputimg.Bitmap));

            ROI    = new Rectangle(inputimg.Width - t, t, t, inputimg.Height - t);
            filter = new AForge.Imaging.Filters.Crop(ROI);
            Image <Gray, Byte> Right      = new Image <Gray, Byte>(filter.Apply(inputimg.Bitmap));
            Image <Gray, Byte> Right_temp = new Image <Gray, Byte>(filter.Apply(inputimg.Bitmap));



            ROI    = new Rectangle(0, t, t, inputimg.Height - t);
            filter = new AForge.Imaging.Filters.Crop(ROI);
            Image <Gray, Byte> Left      = new Image <Gray, Byte>(filter.Apply(inputimg.Bitmap));
            Image <Gray, Byte> Left_temp = new Image <Gray, Byte>(filter.Apply(inputimg.Bitmap));


            #endregion 4 ROIs

            #region threshold
            double av = 0;
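            // Scan the adaptive-threshold constant upward; stop as soon as the four
            // thresholded border strips are almost entirely black (combined average
            // intensity below the limit), and report that constant.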
            for (int i = 0; i < 60; i += 2)
            {
                CvInvoke.cvAdaptiveThreshold(up, up_temp, 255, METState.Current.PAdaptive_type, Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY_INV, METState.Current.PAdaptive_blockSize, i);
                CvInvoke.cvAdaptiveThreshold(Right, Right_temp, 255, METState.Current.PAdaptive_type, Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY_INV, METState.Current.PAdaptive_blockSize, i);
                CvInvoke.cvAdaptiveThreshold(Down, Down_temp, 255, METState.Current.PAdaptive_type, Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY_INV, METState.Current.PAdaptive_blockSize, i);
                CvInvoke.cvAdaptiveThreshold(Left, Left_temp, 255, METState.Current.PAdaptive_type, Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY_INV, METState.Current.PAdaptive_blockSize, i);

                av = up_temp.GetAverage().Intensity + Right_temp.GetAverage().Intensity + Down_temp.GetAverage().Intensity + Left_temp.GetAverage().Intensity;

                Debug.WriteLine(av + " up=" + up_temp.GetAverage().Intensity + " right=" + Right_temp.GetAverage().Intensity + " down=" + Down_temp.GetAverage().Intensity + " left=" + Left_temp.GetAverage().Intensity);

                // METState.Current.EyeImageForShow = new Image<Bgr,byte>(EmgImgProcssing.Filter_PupilAdaptiveThreshold(inputimg , 255, true, i).Bitmap );
                // METState.Current.EyeImageTest = up_temp.ConcateVertical(Left_temp).ConcateVertical(Down_temp).ConcateHorizontal(Right_temp);
                // METState.Current.METCoreObject.SendToForm("", "EyeImage");

                if (av < 5)//METState.Current.glintThreshold
                {
                    cons = i;
                    break;
                }
            }
            #endregion threshold
            Debug.WriteLine(av);



            return(cons);
        }
Example #7
        public Bitmap CreateMosaic(int grid_width, int grid_height, string sourceFolder, Bitmap original_img, bool ColourMosaic)
        {
            int original_width  = original_img.Width;
            int original_height = original_img.Height;

            //int grid_side = 30;
            int sub_image_width  = (int)(original_width / (double)grid_width);
            int sub_image_height = (int)(original_height / (double)grid_height);

            Console.WriteLine($"Sub image: {sub_image_width} x {sub_image_height}");
            Console.WriteLine($"Sub image pixels: {grid_width * sub_image_width} x {grid_height * sub_image_height} == {original_width} x {original_height}");

            AForge.Imaging.Filters.ResizeNearestNeighbor resize    = new AForge.Imaging.Filters.ResizeNearestNeighbor(sub_image_width, sub_image_height);
            AForge.Imaging.Filters.Grayscale             grayscale = new AForge.Imaging.Filters.Grayscale(1 / 3.0, 1 / 3.0, 1 / 3.0);

            string[] files = Directory.GetFiles(sourceFolder);

            Bitmap   combined_image = new Bitmap(original_width, original_height);
            Graphics g = Graphics.FromImage(combined_image);

            Bitmap original_gray = grayscale.Apply(original_img);

            Bitmap[] original_slices = new Bitmap[grid_width * grid_height];
            for (int i = 0; i < grid_width; i++)
            {
                for (int j = 0; j < grid_height; j++)
                {
                    if (CancelProcessing)
                    {
                        return(null);
                    }
                    Rectangle rect = new Rectangle(i * sub_image_width, j * sub_image_height, sub_image_width, sub_image_height);
                    AForge.Imaging.Filters.Crop crop_region = new AForge.Imaging.Filters.Crop(rect);
                    Bitmap slice = (ColourMosaic == true) ? crop_region.Apply(original_img) : crop_region.Apply(original_gray);
                    // Flat index must match the traversal order used later (step = i * grid_height + j).
                    original_slices[i * grid_height + j] = slice;
                }
            }

            Bitmap[] candidates = new Bitmap[files.Length];
            for (int i = 0; i < candidates.Length; i++)
            {
                if (i % 100 == 0 || i + 100 > candidates.Length)
                {
                    int PercentComplete = (int)((100.0 * i) / candidates.Length);
                    Console.WriteLine($"Candidate preprocessing progress: {PercentComplete}%");
                    OnProcessingProgressChanged(new ProcessingProgressEventArgs()
                    {
                        PercentComplete = PercentComplete, ProcessingTask = "Preprocessing..."
                    });
                }

                if (CancelProcessing)
                {
                    return(null);
                }

                if (!IsImage(files[i]))
                {
                    continue;
                }

                Bitmap candidate_image = AForge.Imaging.Image.FromFile(files[i]);
                Bitmap candidate_gray;
                Bitmap resized_image;
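                // Note: the "&& false" below disables the grayscale shortcut, so every
                // candidate currently goes through the else branch.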
                if (IsGrayScale(candidate_image) && false)
                {
                    if (ColourMosaic)
                    {
                        continue;
                    }

                    resized_image = resize.Apply(candidate_image);
                }
                else
                {
                    if (ColourMosaic)
                    {
                        resized_image = resize.Apply(candidate_image);
                    }
                    else
                    {
                        candidate_gray = grayscale.Apply(candidate_image);
                        resized_image  = resize.Apply(candidate_gray);
                    }
                }

                candidates[i] = resized_image;
            }

            List <int> used_indices = new List <int>();
            int        step         = 0;

            for (int i = 0; i < grid_width; i++)
            {
                for (int j = 0; j < grid_height; j++)
                {
                    if (CancelProcessing)
                    {
                        return(null);
                    }
                    int PercentComplete = (int)((100.0 * step) / (grid_width * grid_height - 1));
                    OnProcessingProgressChanged(new ProcessingProgressEventArgs()
                    {
                        PercentComplete = PercentComplete, ProcessingTask = "Creating mosaic..."
                    });
                    Console.WriteLine($"Finding best match to slice {step}/{grid_width * grid_height - 1}...");
                    int best_match_index = FindBestMatch(original_slices[step], candidates, used_indices);
                    used_indices.Add(best_match_index);
                    Bitmap sub_image = candidates[best_match_index];
                    int    cornerX   = i * sub_image_width;
                    int    cornerY   = j * sub_image_height;
                    g.DrawImage(sub_image, new Point(cornerX, cornerY));
                    step++;
                }
            }

            combined_image.Save("combined_image.jpg");

            return(combined_image);
        }
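
A hedged calling sketch for CreateMosaic (illustrative grid size and paths, not from the original project):

        // Hypothetical usage: build a 20x20 colour mosaic from candidate tiles in a folder.
        Bitmap source = (Bitmap)Image.FromFile(@"C:\images\source.jpg");
        Bitmap mosaic = CreateMosaic(20, 20, @"C:\images\tiles", source, true);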
Example #8
        private Bitmap resize(Bitmap image, int newWidth, int newHeight)
        {
            if (KeepAspectRatio)
            {

                double ratio = (double)newHeight / (double)image.Height;
                newWidth = (int)((double)image.Width * ratio);

            }

            AForge.Imaging.Filters.Crop cropper = new AForge.Imaging.Filters.Crop(new Rectangle(0, 0, newWidth, newHeight));

            if (ScalingMethod == ScalingMethods.Nearest_Neighbor || ScalingMethod == ScalingMethods.Bicubic)
            {
                AForge.Imaging.Filters.ResizeNearestNeighbor resizer = new AForge.Imaging.Filters.ResizeNearestNeighbor(newWidth, newHeight);
                image = cropper.Apply(resizer.Apply((Bitmap)image));
            }
            if (ScalingMethod == ScalingMethods.Bicubic)
            {
                MessageBox.Show("Bicubic resize is not implemented for now.\nReverting to nearest neighbor...");
                //AForge.Imaging.Filters.ResizeBicubic resizer = new AForge.Imaging.Filters.ResizeBicubic(newWidth, newHeight);
                //image = cropper.Apply(resizer.Apply((Bitmap)image));
            }
            if (ScalingMethod == ScalingMethods.Bilinear)
            {
                AForge.Imaging.Filters.ResizeBilinear resizer = new AForge.Imaging.Filters.ResizeBilinear(newWidth, newHeight);
                image = cropper.Apply(resizer.Apply((Bitmap)image));
            }

            return image;
        }
Example #9
        private Bitmap resize(Bitmap image, int newWidth, int newHeight)
        {
            if (keepAspectRatio)
            {

                double ratio = (double)newHeight / (double)image.Height;
                newWidth = (int)((double)image.Width * ratio);

            }

            AForge.Imaging.Filters.Crop cropper = new AForge.Imaging.Filters.Crop(new Rectangle(0, 0, newWidth, newHeight));

            if (cmbScalingMethod.SelectedIndex == 0)
            {
                AForge.Imaging.Filters.ResizeNearestNeighbor resizer = new AForge.Imaging.Filters.ResizeNearestNeighbor(newWidth, newHeight);
                image = cropper.Apply(resizer.Apply((Bitmap)image));
            }
            if (cmbScalingMethod.SelectedIndex == 1)
            {
                AForge.Imaging.Filters.ResizeBicubic resizer = new AForge.Imaging.Filters.ResizeBicubic(newWidth, newHeight);
                image = cropper.Apply(resizer.Apply((Bitmap)image));
            }
            if (cmbScalingMethod.SelectedIndex == 2)
            {
                AForge.Imaging.Filters.ResizeBilinear resizer = new AForge.Imaging.Filters.ResizeBilinear(newWidth, newHeight);
                image = cropper.Apply(resizer.Apply((Bitmap)image));
            }

            return image;
        }
Example #10
        /// <summary>
        /// Processes a single sample
        /// </summary>
        /// <param name="unmanaged">Image to process; modified in place</param>
        public static string processSample(ref Imaging.UnmanagedImage unmanaged)
        {
            string rez = "Processing";

            //  Invert the image
            AForge.Imaging.Filters.Invert InvertFilter = new AForge.Imaging.Filters.Invert();
            InvertFilter.ApplyInPlace(unmanaged);

            //    Create a BlobCounter, pull out the largest blob, scale it, intersect, and keep
            //    the image for exclusive use
            AForge.Imaging.BlobCounterBase bc = new AForge.Imaging.BlobCounter();

            bc.FilterBlobs = true;
            bc.MinWidth    = 10;
            bc.MinHeight   = 10;
            // Order blobs by size
            bc.ObjectsOrder = AForge.Imaging.ObjectsOrder.Size;
            // Process the image

            bc.ProcessImage(unmanaged);

            Rectangle[] rects = bc.GetObjectsRectangles();
            rez = "Counted " + rects.Length.ToString() + " rectangles!";
            //if (rects.Length == 0)
            //{
            //    finalPics[r, c] = AForge.Imaging.UnmanagedImage.FromManagedImage(new Bitmap(100, 100));
            //    return 0;
            //}

            // Unfortunately the blob-counting code does not work, so we simply compute the maximum coverage
            // over all blobs - for a multi-digit number, e.g. 16, we may get two regions - one for the 1 and one for the 6.
            // We build a hull that encloses all the blobs. A poor solution; needs rework.
            int lx = unmanaged.Width;
            int ly = unmanaged.Height;
            int rx = 0;
            int ry = 0;

            for (int i = 0; i < rects.Length; ++i)
            {
                if (lx > rects[i].X)
                {
                    lx = rects[i].X;
                }
                if (ly > rects[i].Y)
                {
                    ly = rects[i].Y;
                }
                if (rx < rects[i].X + rects[i].Width)
                {
                    rx = rects[i].X + rects[i].Width;
                }
                if (ry < rects[i].Y + rects[i].Height)
                {
                    ry = rects[i].Y + rects[i].Height;
                }
            }

            // Crop the edges, keeping only the central blobs
            AForge.Imaging.Filters.Crop cropFilter = new AForge.Imaging.Filters.Crop(new Rectangle(lx, ly, rx - lx, ry - ly));
            unmanaged = cropFilter.Apply(unmanaged);

            //  Scale to 100x100
            AForge.Imaging.Filters.ResizeBilinear scaleFilter = new AForge.Imaging.Filters.ResizeBilinear(100, 100);
            unmanaged = scaleFilter.Apply(unmanaged);

            return(rez);
        }
Example #11
        /// <summary>
        /// Image processor and the heart of the sensor
        /// </summary>
        /// <param name="sender">object - owner</param>
        /// <param name="eventArgs">NewFrameEventArgs - args, contains frame from camera</param>
        private void processFrame(object sender, NewFrameEventArgs eventArgs)
        {
            Rectangle   rect = eventArgs.Frame.Bounds();
            PixelFormat pf   = eventArgs.Frame.PixelFormat;

            Bitmap frame = eventArgs.Frame.Clone(rect, pf);

            scaleX = frame.Width / processWidth;
            scaleY = frame.Height / processHeight;
            Bitmap frameFace = eventArgs.Frame.Clone(rect, pf);

            if (OnMotionDetected != null)
            {
                var dataMotion = frame.GetDirectAccess();
                var frameUI    = dataMotion.GetUnmanaged();
                if (motion.ProcessFrame(frameUI) > 0.15)
                {
                    updateMotion(true);
                }
                else
                {
                    updateMotion(false);
                }
                frame.UnlockBits(dataMotion);
            }

            if (OnFaceDetected != null)
            {
                var dataFace        = frameFace.GetDirectAccess();
                var faceUI          = dataFace.GetUnmanaged();
                var downsample      = faceUI.ResizeTo(processWidth, processHeight);
                var faceDetections  = detectorFace.ProcessFrame(downsample);
                var faceDetections2 = detectorFaceProfile.ProcessFrame(downsample);

                if (isPreview)
                {
                    if (faceDetections.Length > 0)
                    {
                        marker             = new Accord.Imaging.Filters.RectanglesMarker(faceDetections.Scale(scaleX, scaleY));
                        marker.MarkerColor = Color.Yellow;
                        frame = marker.Apply(frame);
                    }

                    if (faceDetections2.Length > 0)
                    {
                        marker             = new Accord.Imaging.Filters.RectanglesMarker(faceDetections2.Scale(scaleX, scaleY));
                        marker.MarkerColor = Color.Yellow;
                        frame = marker.Apply(frame);
                    }
                }


                frameFace.UnlockBits(dataFace);

                if (detectorFace.DetectedObjects != null && detectorFace.DetectedObjects.Length > 0)
                {
                    var faces = detectorFace.DetectedObjects.ToFaces((int)scaleX, (int)scaleY);
                    for (int i = 0; i < faces.Length; i++)
                    {
                        var cutter = new AForge.Imaging.Filters.Crop(faces[i].Bounds);
                        faces[i].FaceImage = cutter.Apply(frameFace);

                        if (searchForFaceDirection)
                        {
                            detectorNose.ProcessFrame(faces[i].FaceImage);
                            if (detectorNose.DetectedObjects.Length > 0)
                            {
                                faces[i].Direction = FaceDirection.Frontal;
                            }
                        }

                        var eyeDetections = detectorEye.ProcessFrame(faces[i].FaceImage);

                        if (eyeDetections.Length > 0)
                        {
                            if (eyeDetections.Length >= 1)
                            {
                                faces[i].Direction = FaceDirection.Frontal;
                            }

                            Eye[] eyes = new Eye[eyeDetections.Length];
                            for (int ie = 0; ie < eyes.Length; ie++)
                            {
                                eyes[ie]        = new Eye();
                                eyes[ie].Left   = faces[i].Left + eyeDetections[ie].X;
                                eyes[ie].Top    = faces[i].Top + eyeDetections[ie].Y;
                                eyes[ie].Width  = eyeDetections[ie].Width;
                                eyes[ie].Height = eyeDetections[ie].Height;
                                var cutter2 = new AForge.Imaging.Filters.Crop(eyes[ie].Bounds);
                                eyes[ie].EyeImage = cutter2.Apply(frameFace); // crop the eye region, not the whole face
                            }

                            if (isPreview)
                            {
                                marker             = new Accord.Imaging.Filters.RectanglesMarker(eyes.toRects());
                                marker.MarkerColor = Color.Orange;
                                frame = marker.Apply(frame);
                            }

                            updateEyeDetected(eyes);
                        }
                    }
                    updateFaceDetected(faces);
                }
                else if (detectorFaceProfile.DetectedObjects != null && detectorFaceProfile.DetectedObjects.Length > 0)
                {
                    var faces = detectorFaceProfile.DetectedObjects.ToFaces((int)scaleX, (int)scaleY);
                    for (int i = 0; i < faces.Length; i++)
                    {
                        var cutter = new AForge.Imaging.Filters.Crop(faces[i].Bounds);
                        faces[i].FaceImage = cutter.Apply(frameFace);

                        if (searchForFaceDirection)
                        {
                            detectorEarLeft.ProcessFrame(faces[i].FaceImage);
                            if (detectorEarLeft.DetectedObjects.Length > 0)
                            {
                                faces[i].Direction = FaceDirection.TurnedRight;
                            }
                            else
                            {
                                detectorEarRight.ProcessFrame(faces[i].FaceImage);
                                if (detectorEarRight.DetectedObjects.Length > 0)
                                {
                                    faces[i].Direction = FaceDirection.TurnedLeft;
                                }
                                else
                                {
                                    faces[i].Direction = FaceDirection.NoInfo;
                                }
                            }
                        }
                    }
                    updateFaceDetected(faces);
                }
            }


            updateFrameReceived(frame);
        }
Example #12
        public static Dictionary <Rectangle, Bitmap> Split(KeyValuePair <Rectangle, Bitmap> originalImage, int maxSize, byte quality)
        {
            var result = new Dictionary <Rectangle, Bitmap>();

            if (ImageToByteArray(originalImage.Value, quality).Length <= maxSize)
            {
                result.Add(originalImage.Key, originalImage.Value);
                return(result);
            }

            Bitmap firstBitmap;
            Bitmap secondBitmap;

            Rectangle r1;
            Rectangle r2;

            if (originalImage.Value.Width > originalImage.Value.Height)
            {
                //split vertically

                AForge.Imaging.Filters.Crop crop1 = new AForge.Imaging.Filters.Crop(new Rectangle(0, 0, originalImage.Value.Width / 2, originalImage.Value.Height));
                AForge.Imaging.Filters.Crop crop2 = new AForge.Imaging.Filters.Crop(new Rectangle(originalImage.Value.Width / 2, 0, originalImage.Value.Width / 2, originalImage.Value.Height));

                firstBitmap  = crop1.Apply(originalImage.Value);
                secondBitmap = crop2.Apply(originalImage.Value);

                //firstBitmap = new Bitmap(originalImage.Value.Width/2, originalImage.Value.Height);
                //secondBitmap = new Bitmap(originalImage.Value.Width/2, originalImage.Value.Height);

                //Graphics g1 = Graphics.FromImage(firstBitmap);
                //g1.DrawImage(originalImage.Value, 0, 0,
                //             new Rectangle(0, 0, originalImage.Value.Width/2, originalImage.Value.Height),
                //             GraphicsUnit.Pixel);

                //Graphics g2 = Graphics.FromImage(secondBitmap);
                //g2.DrawImage(originalImage.Value, 0, 0,
                //             new Rectangle(originalImage.Value.Width / 2, 0, originalImage.Value.Width / 2, originalImage.Value.Height),
                //             GraphicsUnit.Pixel);

                r1 = new Rectangle(originalImage.Key.X, originalImage.Key.Y, originalImage.Key.Width / 2,
                                   originalImage.Key.Height);

                r2 = new Rectangle(originalImage.Key.X + originalImage.Key.Width / 2, originalImage.Key.Y, originalImage.Key.Width / 2,
                                   originalImage.Key.Height);
            }
            else
            {
                //split horizontally


                AForge.Imaging.Filters.Crop crop1 = new AForge.Imaging.Filters.Crop(new Rectangle(0, 0, originalImage.Value.Width, originalImage.Value.Height / 2));
                AForge.Imaging.Filters.Crop crop2 = new AForge.Imaging.Filters.Crop(new Rectangle(0, originalImage.Value.Height / 2, originalImage.Value.Width, originalImage.Value.Height / 2));

                firstBitmap  = crop1.Apply(originalImage.Value);
                secondBitmap = crop2.Apply(originalImage.Value);


                //firstBitmap = new Bitmap(originalImage.Value.Width, originalImage.Value.Height/2);
                //secondBitmap = new Bitmap(originalImage.Value.Width, originalImage.Value.Height/2);

                //Graphics g1 = Graphics.FromImage(firstBitmap);
                //g1.DrawImage(originalImage.Value, 0, 0,
                //             new Rectangle(0, 0, originalImage.Value.Width, originalImage.Value.Height/2),
                //             GraphicsUnit.Pixel);

                //Graphics g2 = Graphics.FromImage(secondBitmap);
                //g2.DrawImage(originalImage.Value, 0, 0,
                //             new Rectangle(0, originalImage.Value.Height/2, originalImage.Value.Width, originalImage.Value.Height/2),
                //             GraphicsUnit.Pixel);

                r1 = new Rectangle(originalImage.Key.X, originalImage.Key.Y, originalImage.Key.Width,
                                   originalImage.Key.Height / 2);

                r2 = new Rectangle(originalImage.Key.X, originalImage.Key.Y + originalImage.Key.Height / 2, originalImage.Key.Width,
                                   originalImage.Key.Height / 2);
            }


            return(Split(new KeyValuePair <Rectangle, Bitmap>(r1, firstBitmap), maxSize, quality).Concat(
                       Split(new KeyValuePair <Rectangle, Bitmap>(r2, secondBitmap), maxSize, quality)).ToDictionary(p => p.Key, p => p.Value));
        }
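
A hedged usage sketch for Split (illustrative values; screenBitmap stands for an already captured bitmap):

        // Hypothetical usage: recursively split a bitmap until each piece, JPEG-encoded at
        // quality 50, fits within roughly 64 KB.
        var whole = new KeyValuePair<Rectangle, Bitmap>(new Rectangle(0, 0, screenBitmap.Width, screenBitmap.Height), screenBitmap);
        Dictionary<Rectangle, Bitmap> pieces = Split(whole, 64 * 1024, (byte)50);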