QueryGrayFrame() public method

Capture a Gray image frame
public QueryGrayFrame ( ) : Image<Gray, Byte>
return Image<Gray, Byte>
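A minimal usage sketch (not taken from the examples below): open a video file with Emgu CV's Capture, read gray frames until QueryGrayFrame returns null, then dispose the capture. The file name "video.avi" is a placeholder.

        // Minimal sketch, assuming Emgu.CV and Emgu.CV.Structure are referenced;
        // "video.avi" is a placeholder file name.
        using (Capture capture = new Capture("video.avi"))
        {
            Image<Gray, Byte> frame = capture.QueryGrayFrame();
            while (frame != null)
            {
                // ... process the gray frame here ...
                frame = capture.QueryGrayFrame();
            }
        }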
Example 1
        private static void StartClient()
        {
            th_cli = new Thread (delegate() {
                try
                {
                    ConsoleAdditives.WriteHeader("Stream started");
                    Capture cap = new Capture();
                    while(_isRunning) {
                        // Grab one gray frame and send it as five UDP chunks;
                        // the first byte of each chunk carries the chunk index
                        byte[] buf = cap.QueryGrayFrame().Bytes;
                        int buflp = buf.Length / 5;

                        for (byte i = 0; i < 5; i++)
                        {
                            byte[] tbuf = new byte[buflp];
                            tbuf[0] = i;
                            for (int j = 1; j < buflp; j++)
                            {
                                tbuf[j] = buf[i * buflp + j];
                            }
                            client.Send(tbuf, buflp, remoteEP);
                        }
                    }
                    ConsoleAdditives.WriteHeader("Stream stoped");
                }
                catch(Exception ex)
                {
                    Console.WriteLine(ex.ToString());
                }

            });
            th_cli.Start ();
        }
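Example 1 sends each frame as five UDP datagrams whose first byte is the chunk index. A companion receiver might reassemble a frame as in the hedged sketch below; ReceiveFrame, the UdpClient, and the frame length are assumptions for illustration, not part of the original code (requires System.Net and System.Net.Sockets).

        // Hypothetical receiver for the five-chunk layout used in Example 1
        private static byte[] ReceiveFrame(UdpClient server, int frameLength)
        {
            byte[] frame = new byte[frameLength];
            int chunkLength = frameLength / 5;
            IPEndPoint sender = new IPEndPoint(IPAddress.Any, 0);
            for (int n = 0; n < 5; n++)
            {
                byte[] datagram = server.Receive(ref sender);      // blocks until one chunk arrives
                int index = datagram[0];                           // chunk index written by the sender
                int payload = Math.Min(datagram.Length - 1, chunkLength - 1);
                Array.Copy(datagram, 1, frame, index * chunkLength + 1, payload);
            }
            return frame;
        }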
Example 2
        private void button2_Click(object sender, System.EventArgs e)
        {
            try
            {
                //Trained face counter
                ContTrain = ContTrain + 1;

                //Get a gray frame from capture device
                gray = webcam.QueryGrayFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                //Face Detector
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                    face,
                    1.2,
                    10,
                    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                    new Size(20, 20));

                //Take the first face detected
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    TrainedFace = currentFrame.Copy(f.rect).Convert <Gray, byte>();
                    break;
                }

                //Resize the detected face so it matches the size of the test
                //images, using cubic interpolation
                TrainedFace = result.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                trainingImages.Add(TrainedFace);
                labels.Add(textBox1.Text);



                //Show face added in gray scale
                imageBox1.Image = TrainedFace;

                //Write the number of trained faces to a text file for later loading
                File.WriteAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.xml", trainingImages.ToArray().Length.ToString() + "%");

                //Save each trained face image and append its label to the text file for later loading
                for (int i = 1; i < trainingImages.ToArray().Length + 1; i++)
                {
                    trainingImages.ToArray()[i - 1].Save(Application.StartupPath + "/TrainedFaces/face" + i + ".bmp");
                    File.AppendAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.xml", labels.ToArray()[i - 1] + "%");
                }

                MessageBox.Show(textBox1.Text + "'s face detected and added :)", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Information);
                //textBox1.Text= ("");
            }
            catch
            {
                MessageBox.Show("Enable the face detection first", "Training Fail", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
            }
        }
        /// <summary>
        /// Извлечение ключевых кадров из списка кадров
        /// </summary>
        /// <param name="videoFileName">Имя видео файла</param>
        /// <param name="frameWidth">Ширина кадра</param>
        /// <param name="frameHeight">Высота кадра</param>
        /// <param name="keyFramesInformation">Список нужных кадров</param>
        /// <returns>Ключевые кадры</returns>
        private List<GreyVideoFrame> GetKeyFrames(string videoFileName, int frameWidth, int frameHeight, List<KeyFrameIOInformation> keyFramesInformation)
        {
            try
            {
                List<GreyVideoFrame> keyFrames = new List<GreyVideoFrame>();
                ImageConvertor imageConvertor = new ImageConvertor();

                string videoPath = System.IO.Path.GetDirectoryName(videoFileName);
                string framesDirName = Path.Combine(videoPath, "VideoFrames");
                if (!Directory.Exists(framesDirName))
                    Directory.CreateDirectory(framesDirName);

                Capture capture = new Capture(videoFileName);
                Image<Gray, Byte> frame = capture.QueryGrayFrame();

                int frameNumber = 0;
                CheckKeyFrameAndAddIfInList(keyFrames, keyFramesInformation, frame, framesDirName, frameNumber, frameWidth, frameHeight);
                if (frame != null)
                {
                    keyFrameExtractedEvent(frameNumber, frameNumber + 1, false);
                    do
                    {
                        frame = capture.QueryGrayFrame();
                        ++frameNumber;
                        CheckKeyFrameAndAddIfInList(keyFrames, keyFramesInformation, frame, framesDirName, frameNumber, frameWidth, frameHeight);
                        if (frame != null)
                            keyFrameExtractedEvent(frameNumber, frameNumber + 1, false);
                        else
                            keyFrameExtractedEvent(frameNumber, frameNumber + 1, true);
                    }
                    while (frame != null);
                }
                else
                    keyFrameExtractedEvent(frameNumber, frameNumber + 1, true);
                return keyFrames;
            }
            catch (Exception)
            {
                throw;    // rethrow, preserving the original stack trace
            }
        }
        /// <summary>
        /// Поиск ключевых кадров (второй проход алгоритма)
        /// </summary>
        /// <param name="videoFileName">Имя видеофайла</param>
        /// <param name="frameWidth">Ширина кадра</param>
        /// <param name="frameHeight">Высотка кадра</param>
        /// <param name="framesDifferences">Разница кадров</param>
        /// <param name="treshold">Порог</param>
        /// <returns></returns>
        private List<GreyVideoFrame> GetKeyFrames(string videoFileName, int frameWidth, int frameHeight,
            List<int> framesDifferences, double treshold)
        {
            try
            {
                List<GreyVideoFrame> keyFrames = new List<GreyVideoFrame>();
                ImageConvertor imageConvertor = new ImageConvertor();

                string videoPath = System.IO.Path.GetDirectoryName(videoFileName);
                string framesDirName = Path.Combine(videoPath, "VideoFrames");
                if (!Directory.Exists(framesDirName))
                    Directory.CreateDirectory(framesDirName);

                Capture capture = new Capture(videoFileName);
                Image<Gray, Byte> frame = capture.QueryGrayFrame().Resize(frameWidth, frameHeight, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
                AddKeyFrameFunction(keyFrames, frame, Path.Combine(framesDirName, "0.jpg"), 0, true);

                int framesDifferencesNumber = framesDifferences.Count;
                int previousFrameNumber = 0;
             //   BitmapConvertor bitmapConvertor = new BitmapConvertor();
                for (int i = 0; i < framesDifferencesNumber; i++)
                {
                    frame = capture.QueryGrayFrame();
                    if (framesDifferences[i] > treshold && i + 1 != previousFrameNumber + 1)
                    {
                        int frameNumber = i + 1;
                        previousFrameNumber = i + 1;
                        frame = capture.QueryGrayFrame().Resize(frameWidth, frameHeight, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
                        AddKeyFrameFunction(keyFrames, frame, Path.Combine(framesDirName, frameNumber.ToString() + ".jpg"), frameNumber, true);
                    }
                    if (i == framesDifferencesNumber - 1)
                        keyFrameExtractedEvent(i, i + 1, true);
                    else
                        keyFrameExtractedEvent(i, i + 1, false);
                }
                return keyFrames;
            }
            catch (Exception)
            {
                throw;    // rethrow, preserving the original stack trace
            }
        }
        /// <summary>
        /// Вычисление разницы кадров (первый проход алгоритма)
        /// </summary>
        /// <param name="videoFileName">Имя видеофайла</param>        
        /// <param name="cannyThreshold">Порог для Кенни</param>
        /// <param name="cannyThresholdLinking">Порог слияния границ для Кении</param>
        /// <returns></returns>
        private List<int> GetFramesDifferences(string videoFileName, int frameWidth, int frameHeight, Gray cannyThreshold, Gray cannyThresholdLinking)
        {
            try
            {
                List<int> framesDifferences = new List<int>();

                Capture capture = new Capture(videoFileName);
                Image<Gray, Byte> currentFrame = capture.QueryGrayFrame().Resize(frameWidth, frameHeight, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
                Image<Gray, Byte> nextFrame = null;
                int frameNumber = 0;
                do
                {
                    nextFrame = capture.QueryGrayFrame();//.Resize(frameWidth, frameHeight, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
                    ++frameNumber;
                    if (nextFrame != null)
                    {
                        nextFrame = nextFrame.Resize(frameWidth, frameHeight, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
                        Image<Gray, Byte> currentCannyFrame = currentFrame.Canny(cannyThreshold, cannyThresholdLinking);
                        Image<Gray, Byte> nextCannyFrame = nextFrame.Canny(cannyThreshold, cannyThresholdLinking);
                        int framesDifference = CountFramesDifference(currentCannyFrame, nextCannyFrame);
                        framesDifferences.Add(framesDifference);
                        currentFrame = nextFrame;
                        framesDifferenceEvent(frameNumber - 1, frameNumber, false);
                    }
                    else
                        framesDifferenceEvent(frameNumber - 1, frameNumber, true);
                }
                while (nextFrame != null);

                return framesDifferences;
            }
            catch (Exception)
            {
                throw;    // rethrow, preserving the original stack trace
            }
        }
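The two GetKeyFrames overloads and GetFramesDifferences read as the two passes of a frame-difference key-frame extractor. A hypothetical driver could chain them as below; the resolution, Canny thresholds, and the threshold heuristic are assumptions for illustration only (requires System.Linq).

        // Hypothetical driver for the two-pass extraction above (all parameter values are assumptions)
        private List<GreyVideoFrame> ExtractKeyFrames(string videoFileName)
        {
            List<int> differences = GetFramesDifferences(videoFileName, 320, 240, new Gray(120), new Gray(60));
            double treshold = differences.Count > 0 ? 2.0 * differences.Average() : 0.0;   // e.g. twice the mean difference
            return GetKeyFrames(videoFileName, 320, 240, differences, treshold);
        }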
Example 6
        private void LoadHandTrainingPatternsFromDir(string path)
        {
            try
            {
                byte[] TrainPatterns;
                MNistHeight = 32;
                MNistWidth = 32;
                MNistSize = MNistWidth * MNistHeight;
                int TrainingLabelCount = 10;
                int LabelImageCount = 20;
                TrainingPatternsCount = TrainingLabelCount*LabelImageCount;

                TrainPatterns = new byte[TrainingPatternsCount * MNistSize];
                unsafe
                {

                    for (int ii = 0; ii < TrainingLabelCount; ii++)
                    {
                        string type = ii.ToString("D1");
                        //Image<Bgr, Byte> image = new Image<Bgr, byte>(path + "\\" + type + ".jpg").Resize(32, 32, Emgu.CV.CvEnum.INTER.CV_INTER_AREA); //Read the files as an 8-bit Bgr image  
                        //Image<Gray, Byte> gray = image.Convert<Gray, Byte>(); //Convert it to Grayscale
                        Capture cap = new Capture(path + "\\" + type + ".MOV");
                        for (int i = 0; i < LabelImageCount; i++)
                        {
                            Image<Gray, Byte> gray = cap.QueryGrayFrame().Resize(32, 32, Emgu.CV.CvEnum.INTER.CV_INTER_AREA);
                            for (int j = 0; j < MNistSize; j++)
                            {
                                TrainPatterns[ii * MNistSize * LabelImageCount + i * MNistSize + j] = ((byte*)gray.MIplImage.imageData + j)[0];
                            }
                        }
                        cap.Dispose();
                    }
                }
                MNISTTraining = new ByteImageData[TrainingPatternsCount];
                Parallel.For(0, TrainingPatternsCount, parallelOption, j =>
                {
                    int label = j / LabelImageCount;
                    ByteImageData imageData = new ByteImageData(label, new byte[MNistSize]);
                    for (int i = 0; i < MNistSize; i++)
                    {
                        imageData.Image[i] = TrainPatterns[(j * MNistSize) + i];
                    }
                    MNISTTraining[j] = imageData;
                });

            }
            catch (Exception)
            {
                throw;
            }
        }
Example 7
        private void LoadHandTestingPatternsFromDir(string path)
        {
            try
            {
                byte[] TestPatterns;
                MNistHeight = 32;
                MNistWidth = 32;
                MNistSize = MNistWidth * MNistHeight;
                int TrainingLabelCount = 9;
                int LabelImageCount = 100;
                TestingPatternsCount = TrainingLabelCount * LabelImageCount;
                TestPatterns = new byte[TestingPatternsCount * MNistSize];
                //Capture cap = new Capture(@"D:\ebooks\hand gestrue recognition\hand data set\mov\0.MOV");
                unsafe
                {

                    for (int ii = 0; ii < TrainingLabelCount; ii++)
                    {
                        string type = ii.ToString("D1");
                        //Image<Bgr, Byte> image = new Image<Bgr, byte>(path + "\\" + type + ".jpg").Resize(32, 32, Emgu.CV.CvEnum.INTER.CV_INTER_AREA); //Read the files as an 8-bit Bgr image  
                        //Image<Gray, Byte> gray = image.Convert<Gray, Byte>(); //Convert it to Grayscale
                        Capture cap = new Capture(path + "\\" + type + ".MOV");
                        for (int i = 0; i < 200; i++)
                        {
                            cap.QueryGrayFrame();    // skip the first 200 frames
                        }
                        for (int i = 0; i < LabelImageCount; i++)
                        {
                            Image<Gray, Byte> gray = cap.QueryGrayFrame().Resize(32, 32, Emgu.CV.CvEnum.INTER.CV_INTER_AREA);
                            for (int j = 0; j < MNistSize; j++)
                            {
                                TestPatterns[ii * MNistSize * LabelImageCount + i * MNistSize + j] = ((byte*)gray.MIplImage.imageData + j)[0];
                            }
                        }
                        cap.Dispose();
                    }
                }


                MNISTTesting = new ByteImageData[TestingPatternsCount];
                Parallel.For(0, TestingPatternsCount, parallelOption, j =>
                {
                    ByteImageData pattern = new ByteImageData(j / LabelImageCount, new byte[MNistSize]);
                    for (int i = 0; i < MNistSize; i++)
                    {
                        pattern.Image[i] = TestPatterns[(j * MNistSize) + i];
                    }
                    MNISTTesting[j] = pattern;
                });

            }
            catch (Exception)
            {
                throw;
            }
        }
Example 8
 // "Add face to database" button
 private void metroButton1_Click(object sender, EventArgs e)
 {
     // just in case, check that this step succeeds
     try
     {
         // check whether the database folder exists
         String dir = Application.StartupPath + "/Faces";
         // if it does not exist, create it
         if (!Directory.Exists(dir))
         {
             Directory.CreateDirectory(dir);
         }
     }
     catch (Exception)
     {
         // report the error and shut down
         MessageBox.Show("Something went wrong. Try restarting the application", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
         ifCamNotOkey();
     }
     if (textName.Text == "")
     {
         MessageBox.Show("It looks like you did not enter a name for the face", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
     }
     else
     {
         // guard against a failure on the FIRST face addition
         try
         {
             // counter of the number of "faces" in the database
             Count = Count + 1;
             // grab a gray frame from the camera and resize it
             grayFace = camera.QueryGrayFrame().Resize(855, 588, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
             // look for rectangular regions that contain faces (minimum "square" size 20 x 20)
             MCvAvgComp[][] DetectedFaces = grayFace.DetectHaarCascade(faceDetected, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));
             foreach (MCvAvgComp f in DetectedFaces[0])
             {
                 TrainedFace = Frame.Copy(f.rect).Convert <Gray, byte>();
                 break;
             }
             // "photograph" the detected face, shrinking it to 100 x 100 and keeping it grayscale
             TrainedFace = result.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
             // add this face to the database
             trainingImames.Add(TrainedFace);
             // remember the name of this face
             labels.Add(textName.Text);
             // face counter + saving the processed faces to the database folder
             File.WriteAllText(Application.StartupPath + "/Faces/Faces.txt", "Number of faces: " + trainingImames.ToArray().Length.ToString() + "\n");
             // save the new faces to the folder
             for (int i = 1; i < trainingImames.ToArray().Length + 1; i++)
             {
                 trainingImames.ToArray()[i - 1].Save(Application.StartupPath + "/Faces/face" + i + ".bmp");
                 // write the face name to the text file
                 File.AppendAllText(Application.StartupPath + "/Faces/Faces.txt", labels.ToArray()[i - 1] + "\n");
             }
             // report that the face was added to the face database
             MessageBox.Show(textName.Text + " - face successfully added to the face database", "Success!", MessageBoxButtons.OK, MessageBoxIcon.Information);
             metroComboBox1.Items.Clear();
             string[] text = File.ReadAllLines(Application.StartupPath + "/Faces/Faces.txt");
             // clear the name input box of the previous text
             metroComboBox1.Items.AddRange(text);
             textName.Text = "";
         }
         catch (Exception)
         {
             // no face was detected
             MessageBox.Show("An error occurred while adding the first face", "Face not detected!", MessageBoxButtons.OK, MessageBoxIcon.Error);
         }
     }
 }
Example 9
        /// <summary>
        /// Загрузка кадра по номеру (с видео)
        /// </summary>
        /// <param name="videoFileName">Имя видеофайла</param>
        /// <param name="keyFrameIOInformation">Информация о кадре</param>
        /// <returns>Кард</returns>
        public Task<GreyVideoFrame> LoadFrameAsync(string videoFileName, KeyFrameIOInformation keyFrameIOInformation)
        {
            try
            {
                if (videoFileName == null || videoFileName.Length == 0)
                    throw new ArgumentNullException("Null videoFileName in LoadFrameAsync");
                if (keyFrameIOInformation == null)
                    throw new ArgumentNullException("Null keyFrameIOInformation in LoadFrameAsync");
                if (keyFrameIOInformation.Number < 0)
                    throw new ArgumentException("Error frameNumber in LoadFrameAsync");
                if (keyFrameIOInformation.Width <= 0)
                    throw new ArgumentException("Error Width in LoadFrameAsync");
                if (keyFrameIOInformation.Height <= 0)
                    throw new ArgumentException("Error Height in LoadFrameAsync");

                return Task.Run(() =>
                {
                  /*  string videoPath = System.IO.Path.GetDirectoryName(videoFileName);
                    string framesDirName = System.IO.Path.Combine(videoPath, "VideoFrames");
                    if (!Directory.Exists(framesDirName))
                        Directory.CreateDirectory(framesDirName);*/

                    GreyVideoFrame videoFrame = null;

                    int currentFrameNumnber = -1;
                    Capture capture = new Capture(videoFileName);
                    Image<Gray, byte> frame = null;
                    while (currentFrameNumnber != keyFrameIOInformation.Number)
                    {
                        frame = capture.QueryGrayFrame();
                        currentFrameNumnber++;
                    }
                    if (frame != null)
                    {
                       // string frameFileName = Path.Combine(framesDirName, keyFrameIOInformation.Number.ToString() + ".jpg");
                        frame = frame.Resize(keyFrameIOInformation.Width, keyFrameIOInformation.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
                      //  frame.Save(frameFileName);
                        videoFrame = CreateVideoFrame(frame, keyFrameIOInformation);
                    }
                    capture.Dispose();
                    return videoFrame;
                });
            }
            catch (Exception)
            {
                throw;    // rethrow, preserving the original stack trace
            }
        }