Example #1
        public VectorOfMat Dctspy()
        {
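            // When processMode is set, push the DCT display image of each colour channel (R, G, B);
            // otherwise push a single grayscale DCT display image.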
            VectorOfMat vector = new VectorOfMat();

            if (processMode)
            {
                Mat tR = new Mat();
                Mat tG = new Mat();
                Mat tB = new Mat();

                dctR.AugmentShow().CopyTo(tR);
                vector.Push(tR);
                dctG.AugmentShow().CopyTo(tG);
                vector.Push(tG);
                dctB.AugmentShow().CopyTo(tB);
                vector.Push(tB);
            }
            else
            {
                Mat temp = new Mat();

                dctGray.AugmentShow().CopyTo(temp);
                vector.Push(temp);
            }

            return(vector);
        }
Example #2
        private void toAdaptiveYCrCbEqualization(Mat img)
        {
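            // Colour images only: convert BGR to YCrCb, run CLAHE on every channel,
            // then merge the channels and convert back to BGR for display.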
            if (img.NumberOfChannels < 3)
            {
                MessageBoxResult result = MessageBox.Show("흑백 영상은 변환할 수 없습니다.", "경고"); // "Grayscale images cannot be converted.", "Warning"
                return;
            }

            CvInvoke.CvtColor(img, CurrentMat, ColorConversion.Bgr2YCrCb);
            var channels = CurrentMat.Split(); // split the image into its channels

            CvInvoke.CLAHE(channels[0], 2, new System.Drawing.Size(8, 8), channels[0]);
            CvInvoke.CLAHE(channels[1], 2, new System.Drawing.Size(8, 8), channels[1]);
            CvInvoke.CLAHE(channels[2], 2, new System.Drawing.Size(8, 8), channels[2]);

            VectorOfMat temp = new VectorOfMat();

            temp.Push(channels[0]);
            temp.Push(channels[1]);
            temp.Push(channels[2]);

            CvInvoke.Merge(temp, CurrentMat);
            CvInvoke.CvtColor(CurrentMat, CurrentMat, ColorConversion.YCrCb2Bgr);
            showImg(CurrentMat);
        }
Example #3
        public VectorOfMat Dftspy()
        {
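            // When processMode is set, push the shifted DFT spectrum of each colour channel;
            // otherwise push the grayscale spectrum only.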
            VectorOfMat vector = new VectorOfMat();

            if (processMode)
            {
                Mat tR = new Mat();
                Mat tG = new Mat();
                Mat tB = new Mat();

                dftR.DftShift().CopyTo(tR);
                vector.Push(tR);
                dftG.DftShift().CopyTo(tG);
                vector.Push(tG);
                dftB.DftShift().CopyTo(tB);
                vector.Push(tB);
            }
            else
            {
                Mat temp = new Mat();
                dftGray.DftShift().CopyTo(temp);
                vector.Push(temp);
            }

            return(vector);
        }
Example #4
        //Channel
        public Image <Bgr, byte> GetChannels(bool red, bool green, bool blue)
        {
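            // Keep the requested channels and substitute a blank (all-zero) plane for the others.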
            //var channel = sourceImage.Split()[0]; //[] - channel index
            bool[] channels = new bool[3] {
                blue, green, red
            };
            Image <Bgr, byte> destImage = sourceImage.CopyBlank();

            VectorOfMat vm = new VectorOfMat();

            for (int i = 0; i < 3; i++)
            {
                if (channels[i] == true)
                {
                    vm.Push(sourceImage.Split()[i]);
                }
                else
                {
                    vm.Push(sourceImage.Split()[i].CopyBlank());
                }
            }

            CvInvoke.Merge(vm, destImage);
            return(destImage);
        }
Example #5
        private void SaveFace(object sender, RoutedEventArgs e)
        {
            if (SelectedFace == null)
            {
                MessageBox.Show("Выберите лицо для регистрации", "Ошибка регистрации", MessageBoxButton.OK, MessageBoxImage.Warning); // "Select a face to register", "Registration error"
                return;
            }

            FaceRecognizer = new FisherFaceRecognizer(0, 3500);
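            // The selected face plus every file in the "Face Samples" folder is pushed as a training image, each with its own label.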
            var imageList = new VectorOfMat();
            var labelList = new VectorOfInt();

            imageList.Push(SelectedFace.CVImage.Resize(100, 100, Inter.Cubic).Mat);

            var samples = Directory.GetFiles("Face Samples");

            foreach (var sample in samples)
            {
                imageList.Push(new Image <Gray, byte>(sample).Resize(100, 100, Inter.Cubic).Mat);
            }

            labelList.Push(Enumerable.Range(1, samples.Length + 1).ToArray());
            FaceRecognizer.Train(imageList, labelList);

            DialogResult = true;
            Close();
        }
Example #6
        public Image <Bgr, byte> channelfilter(Image <Bgr, byte> simage, int i)
        {
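            // Build an image in which only channel i carries data; the other two planes stay blank.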
            var channel = simage.Split()[i];
            Image <Bgr, byte> destImage = simage.CopyBlank();
            VectorOfMat       vm        = new VectorOfMat();

            switch (i)
            {
            case 0:
            {
                vm.Push(channel);
                vm.Push(channel.CopyBlank());
                vm.Push(channel.CopyBlank());
                break;
            }

            case 1:
            {
                vm.Push(channel.CopyBlank());
                vm.Push(channel);
                vm.Push(channel.CopyBlank());
                break;
            }

            case 2:
            {
                vm.Push(channel.CopyBlank());
                vm.Push(channel.CopyBlank());
                vm.Push(channel);
                break;
            }
            }
            CvInvoke.Merge(vm, destImage);
            return(destImage);
        }
Example #7
        public Emgu.CV.Image <Bgr, byte> chanelsChange(Image <Bgr, byte> sourceImage, int type)
        {
            var channel = sourceImage.Split()[type];

            Image <Bgr, byte> destImage = sourceImage.CopyBlank();

            VectorOfMat vm = new VectorOfMat();

            if (type == 0)
            {
                vm.Push(channel);
                vm.Push(channel.CopyBlank());
                vm.Push(channel.CopyBlank());
            }
            else if (type == 1)
            {
                vm.Push(channel.CopyBlank());
                vm.Push(channel.CopyBlank());
                vm.Push(channel);
            }
            else if (type == 2)
            {
                vm.Push(channel.CopyBlank());
                vm.Push(channel);
                vm.Push(channel.CopyBlank());
            }

            CvInvoke.Merge(vm, destImage);

            return(destImage.Resize(640, 480, Inter.Linear));
        }
Example #8
        //Function that outputs a single colour channel
        public static Image <Bgr, byte> Chanel(int i, Image <Bgr, byte> sourceImage)
        {
            var channel = sourceImage.Split()[i];
            Image <Bgr, byte> destImage = sourceImage.CopyBlank();

            VectorOfMat vm = new VectorOfMat();

            if (i == 0)
            {
                vm.Push(channel);
                vm.Push(channel.CopyBlank());
                vm.Push(channel.CopyBlank());
            }
            if (i == 1)
            {
                vm.Push(channel.CopyBlank());
                vm.Push(channel);
                vm.Push(channel.CopyBlank());
            }
            if (i == 2)
            {
                vm.Push(channel.CopyBlank());
                vm.Push(channel.CopyBlank());
                vm.Push(channel);
            }
            CvInvoke.Merge(vm, destImage);

            return(destImage);
        }
Example #9
        private void comboBox1_SelectedIndexChanged(object sender, EventArgs e)
        {
            if (comboBox1.SelectedIndex == 0)
            {
                var channel = sourceImage.Split()[2];

                Image <Bgr, byte> destImage = sourceImage.CopyBlank();

                VectorOfMat vm = new VectorOfMat();

                vm.Push(channel.CopyBlank());
                vm.Push(channel.CopyBlank());
                vm.Push(channel);

                CvInvoke.Merge(vm, destImage);

                imageBox2.Image = destImage;
            }
            if (comboBox1.SelectedIndex == 1)
            {
                var channel = sourceImage.Split()[1];

                Image <Bgr, byte> destImage = sourceImage.CopyBlank();

                VectorOfMat vm = new VectorOfMat();

                vm.Push(channel.CopyBlank());
                vm.Push(channel);
                vm.Push(channel.CopyBlank());

                CvInvoke.Merge(vm, destImage);

                imageBox2.Image = destImage;
            }
            if (comboBox1.SelectedIndex == 2)
            {
                var channel = sourceImage.Split()[0];

                Image <Bgr, byte> destImage = sourceImage.CopyBlank();

                VectorOfMat vm = new VectorOfMat();

                vm.Push(channel);
                vm.Push(channel.CopyBlank());
                vm.Push(channel.CopyBlank());

                CvInvoke.Merge(vm, destImage);

                imageBox2.Image = destImage;
            }
        }
Example #10
        public Image <Bgr, byte> Red()
        {
            var channel = sourceImage.Split()[0];

            Image <Bgr, byte> destImage = sourceImage.CopyBlank();

            VectorOfMat vm = new VectorOfMat();

            vm.Push(channel);
            vm.Push(channel.CopyBlank());
            vm.Push(channel.CopyBlank());

            CvInvoke.Merge(vm, destImage);
            return(destImage);
        }
Example #11
        private void button2_Click(object sender, EventArgs e)
        {
            //Get the optimal size for the FFT, usually a multiple of 2, 3 and 5
            int M = CvInvoke.GetOptimalDFTSize(image.Rows);
            int N = CvInvoke.GetOptimalDFTSize(image.Cols);
            //Pad the image to the optimal size
            Mat padded = new Mat();

            CvInvoke.CopyMakeBorder(image, padded, 0, M - image.Rows, 0, N - image.Cols, BorderType.Constant, new MCvScalar(1));

            //Create a 2-channel matrix: channel 0 holds the (normalized) source data, channel 1 is zero

            Mat m = new Mat(padded.Size, DepthType.Cv32F, 1);

            m.SetTo(new MCvScalar(255));
            CvInvoke.Divide(padded, m, padded);
            m.SetTo(new MCvScalar(0));
            VectorOfMat matVector = new VectorOfMat();

            matVector.Push(padded);
            matVector.Push(m);
            Mat matComplex = new Mat(padded.Size, DepthType.Cv32F, 2);

            CvInvoke.Merge(matVector, matComplex);
            padded.Dispose();
            m.Dispose();
            matVector.Dispose();
            // This will hold the DFT data: a 2-channel matrix storing the transform result
            Matrix <float> forwardDft = new Matrix <float>(image.Rows, image.Cols, 2);

            CvInvoke.Dft(matComplex, forwardDft, DxtType.Forward, 0);

            // We'll display the magnitude (the spectrum image)
            Matrix <float> forwardDftMagnitude = GetDftMagnitude(forwardDft);

            SwitchQuadrants(ref forwardDftMagnitude);

            // Now compute the inverse transform to see if we can get back the original
            Matrix <float> reverseDft = new Matrix <float>(forwardDft.Rows, forwardDft.Cols, 2);

            CvInvoke.Dft(forwardDft, reverseDft, DxtType.InvScale, 0);
            Matrix <float> reverseDftMagnitude = GetDftMagnitude(reverseDft);

            imageBox1.Image = image;
            imageBox2.Image = Matrix2Image(forwardDftMagnitude);
            imageBox3.Image = Matrix2Image(reverseDftMagnitude);
        }
Example #12
        private void calculateToolStripMenuItem_Click(object sender, EventArgs e)
        {
            try
            {
                if (pictureBox1.Image == null)
                {
                    return;
                }

                var img = new Bitmap(pictureBox1.Image)
                          .ToImage <Gray, byte>();
                Mat     hist     = new Mat();
                float[] ranges   = new float[] { 0, 256 };
                int[]   channel  = { 0 };
                int[]   histSize = { 256 };

                VectorOfMat ms = new VectorOfMat();
                ms.Push(img);
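                // Compute a 256-bin histogram of the grayscale image over the range [0, 256).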
                CvInvoke.CalcHist(ms, channel, null, hist, histSize, ranges, false);

                HistogramViewer viewer = new HistogramViewer();
                viewer.Text     = "Image Histogram";
                viewer.ShowIcon = false;
                viewer.HistogramCtrl.AddHistogram("Image Histogram", Color.Blue, hist, 256, ranges);
                viewer.HistogramCtrl.Refresh();
                viewer.Show();


                //pictureBox1.Image = CreateGraph(hist).GetImage();
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
Example #13
        //Stitches two frames taken from two separate videos
        private void stichFirstFrameToolStripMenuItem_Click(object sender, System.EventArgs e)
        {
            try
            {
                using (Stitcher stitcher = new Stitcher(true))
                {
                    using (VectorOfMat vm = new VectorOfMat())
                    {
                        vm.Push(sourceImages);
                        var stitchStatus = stitcher.Stitch(vm, result);

                        if (stitchStatus)
                        {
                            Bitmap bt = new Bitmap(result.Bitmap);
                            //for some reason the image comes out rotated :(
                            bt.RotateFlip(RotateFlipType.RotateNoneFlipXY);
                            pictureBox1.Image = bt;
                            // pictureBox1.Image.Save(@"path", ImageFormat.Jpeg);
                        }
                        else
                        {
                            MessageBox.Show(this, String.Format("Stiching Error: {0}", stitchStatus));
                            pictureBox1.Image = null;
                        }
                    }
                }
            }
            catch
            {
            }
        }
Example #14
 public Image <Gray, byte> GetChannel(int channelIndex)
 {
     if (channelIndex != 3)
     {
         var channel = sourceImage.Split()[channelIndex];
         return(channel);
     }
     else
     {
         Mat         destImage = new Mat();
         VectorOfMat vm        = new VectorOfMat();
         vm.Push(sourceImage.Split()[0]); vm.Push(sourceImage.Split()[1]); vm.Push(sourceImage.Split()[2]);
         CvInvoke.Merge(vm, destImage);
         return(destImage.ToImage <Gray, byte>());
     }
 }
Example #15
        /*
         * /// <summary>
         * /// Create a LevMarqSparse solver
         * /// </summary>
         * public LevMarqSparse()
         * {
         * _ptr = CvInvoke.CvCreateLevMarqSparse();
         * }*/

        /// <summary>
        /// Useful function to do simple bundle adjustment tasks
        /// </summary>
        /// <param name="points">Positions of points in global coordinate system (input and output), values will be modified by bundle adjustment</param>
        /// <param name="imagePoints">Projections of 3d points for every camera</param>
        /// <param name="visibility">Visibility of 3d points for every camera</param>
        /// <param name="cameraMatrix">Intrinsic matrices of all cameras (input and output), values will be modified by bundle adjustment</param>
        /// <param name="R">rotation matrices of all cameras (input and output), values will be modified by bundle adjustment</param>
        /// <param name="T">translation vector of all cameras (input and output), values will be modified by bundle adjustment</param>
        /// <param name="distCoeffcients">distortion coefficients of all cameras (input and output), values will be modified by bundle adjustment</param>
        /// <param name="termCrit">Termination criteria, a reasonable value will be (30, 1.0e-12) </param>
        public static void BundleAdjust(
            MCvPoint3D64f[] points, MCvPoint2D64f[][] imagePoints, int[][] visibility,
            Matrix <double>[] cameraMatrix, Matrix <double>[] R, Matrix <double>[] T, Matrix <double>[] distCoeffcients, MCvTermCriteria termCrit)
        {
            using (Matrix <double> imagePointsMat = CvToolbox.GetMatrixFromPoints(imagePoints))
                using (Matrix <int> visibilityMat = CvToolbox.GetMatrixFromArrays(visibility))
                    using (VectorOfMat cameraMatVec = new VectorOfMat())
                        using (VectorOfMat rMatVec = new VectorOfMat())
                            using (VectorOfMat tMatVec = new VectorOfMat())
                                using (VectorOfMat distorMatVec = new VectorOfMat())
                                {
                                    cameraMatVec.Push(cameraMatrix);
                                    rMatVec.Push(R);
                                    tMatVec.Push(T);
                                    distorMatVec.Push(distCoeffcients);


                                    GCHandle handlePoints = GCHandle.Alloc(points, GCHandleType.Pinned);

                                    CvInvoke.CvLevMarqSparseAdjustBundle(
                                        cameraMatrix.Length,
                                        points.Length, handlePoints.AddrOfPinnedObject(),
                                        imagePointsMat, visibilityMat, cameraMatVec, rMatVec, tMatVec, distorMatVec, ref termCrit);

                                    handlePoints.Free();
                                }
        }
Example #16
      /*
      /// <summary>
      /// Create a LevMarqSparse solver
      /// </summary>
      public LevMarqSparse()
      {
         _ptr = CvInvoke.CvCreateLevMarqSparse();
      }*/

      /// <summary>
      /// Useful function to do simple bundle adjustment tasks
      /// </summary>
      /// <param name="points">Positions of points in global coordinate system (input and output), values will be modified by bundle adjustment</param>
      /// <param name="imagePoints">Projections of 3d points for every camera</param>
      /// <param name="visibility">Visibility of 3d points for every camera</param>
      /// <param name="cameraMatrix">Intrinsic matrices of all cameras (input and output), values will be modified by bundle adjustment</param>
      /// <param name="R">rotation matrices of all cameras (input and output), values will be modified by bundle adjustment</param>
      /// <param name="T">translation vector of all cameras (input and output), values will be modified by bundle adjustment</param>
      /// <param name="distCoeffcients">distortion coefficients of all cameras (input and output), values will be modified by bundle adjustment</param>
      /// <param name="termCrit">Termination criteria, a reasonable value will be (30, 1.0e-12) </param>
      public static void BundleAdjust(
         MCvPoint3D64f[] points, MCvPoint2D64f[][] imagePoints, int[][] visibility,
         Matrix<double>[] cameraMatrix, Matrix<double>[] R, Matrix<double>[] T, Matrix<double>[] distCoeffcients, MCvTermCriteria termCrit)
      {
         using (Matrix<double> imagePointsMat = CvToolbox.GetMatrixFromPoints(imagePoints))
         using (Matrix<int> visibilityMat = CvToolbox.GetMatrixFromArrays(visibility))
         using (VectorOfMat cameraMatVec = new VectorOfMat())
         using (VectorOfMat rMatVec = new VectorOfMat())
         using (VectorOfMat tMatVec = new VectorOfMat())
         using (VectorOfMat distorMatVec = new VectorOfMat())
         {
            cameraMatVec.Push(cameraMatrix);
            rMatVec.Push(R);
            tMatVec.Push(T);
            distorMatVec.Push(distCoeffcients);


            GCHandle handlePoints = GCHandle.Alloc(points, GCHandleType.Pinned);

            CvInvoke.CvLevMarqSparseAdjustBundle(
               cameraMatrix.Length,
               points.Length, handlePoints.AddrOfPinnedObject(),
               imagePointsMat, visibilityMat, cameraMatVec, rMatVec, tMatVec, distorMatVec, ref termCrit);

            handlePoints.Free();

         }
      }
Example #17
        private void timer1_Tick(object sender, EventArgs e)
        {
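            // Grab one frame from each of the three cameras, display them, and stitch them into a single panorama.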
            Mat cap = new Mat();

            Mat[] imgs   = new Mat[3];
            Mat   result = new Mat();

            cap             = c1.QueryFrame();
            imageBox1.Image = cap;
            imgs[0]         = cap;
            cap             = c2.QueryFrame();
            imageBox2.Image = cap;
            imgs[1]         = cap;
            cap             = c3.QueryFrame();
            imageBox3.Image = cap;
            imgs[2]         = cap;
            Emgu.CV.Stitching.Stitcher stitcher = new Emgu.CV.Stitching.Stitcher(true);
            using (VectorOfMat vms = new VectorOfMat())
            {
                vms.Push(imgs);
                bool stitchStatus = stitcher.Stitch(vms, result);
                if (stitchStatus)
                {
                    imageBox4.Image            = result;
                    toolStripStatusLabel1.Text = "Stitch OK";
                }
                else
                {
                    toolStripStatusLabel1.Text = "Stitch error";
                }
            }
        }
Example #18
        public bool ConfigRecognitionImageTrain(Mat imageTrain, Mat roiTrain, bool useGlobalMatch)
        {
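            // Register a new training image: detect SIFT keypoints inside the ROI and compute their descriptors.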
            _trainsImage.Push(imageTrain);

            _keypointsImageTrain.Add(new VectorOfKeyPoint());
            _descriptorsImageTrain.Push(new Mat());

            _LODIndex = _trainsImage.Size - 1;

            SIFT sift = new SIFT();

            //Insert the target image's keypoints into the keypoint list
            _keypointsImageTrain.Insert(_LODIndex, new VectorOfKeyPoint(sift.Detect(_trainsImage[_LODIndex], roiTrain)));
            if (_keypointsImageTrain[_LODIndex] != null && _keypointsImageTrain[_LODIndex].Size < 4)
            {
                return(false);
            }

            //Compute descriptors for the extracted keypoints; if too few are found, return false (not recognized)
            sift.Compute(_trainsImage[_LODIndex], _keypointsImageTrain[_LODIndex], _descriptorsImageTrain[_LODIndex]);
            if (_descriptorsImageTrain[_LODIndex].Rows < 4)
            {
                return(false);
            }

            if (useGlobalMatch)
            {
                return(true);
            }
            else
            {
                return(ConfigureImageTrainROI(_keypointsImageTrain[_LODIndex], roiTrain));
            }
        }
Example #19
        /// <summary>
        /// Stitch images together
        /// </summary>
        /// <param name="images">The list of images to stitch</param>
        /// <returns>A final stitched image</returns>
        public static Mat StichImages(List <Mat> images)
        {
            //Declare the Mat object that will store the final output
            Mat output = new Mat();

            //Declare a vector to store all images from the list
            VectorOfMat matVector = new VectorOfMat();

            //Push all images in the list into a vector
            foreach (Mat img in images)
            {
                matVector.Push(img);
            }

            //Declare a new stitcher
            Stitcher stitcher = new Stitcher();

            //Declare the type of detector that will be used to detect keypoints
            Brisk detector = new Brisk();

            //Here are some other detectors that you can try
            //ORBDetector detector = new ORBDetector();
            //KAZE detector = new KAZE();
            //AKAZE detector = new AKAZE();

            //Set the stitcher class to use the specified detector declared above
            stitcher.SetFeaturesFinder(detector);

            //Stitch the images together
            stitcher.Stitch(matVector, output);

            //Return the final stitched image
            return(output);
        }
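A minimal usage sketch for the StichImages helper above (the file names are hypothetical and assumed to exist on disk):

            //Load two overlapping photos (hypothetical files) and stitch them into one panorama
            List <Mat> parts = new List <Mat>
            {
                CvInvoke.Imread("left.jpg"),
                CvInvoke.Imread("right.jpg")
            };
            Mat panorama = StichImages(parts);
            CvInvoke.Imwrite("panorama.jpg", panorama);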
Example #20
        private void selectImagesButton_Click(object sender, EventArgs e)
        {
            OpenFileDialog dlg = new OpenFileDialog();

            dlg.CheckFileExists = true;
            dlg.Multiselect     = true;

            if (dlg.ShowDialog() == System.Windows.Forms.DialogResult.OK)
            {
                sourceImageDataGridView.Rows.Clear();

                Image <Bgr, byte>[] sourceImages = new Image <Bgr, byte> [dlg.FileNames.Length];

                for (int i = 0; i < sourceImages.Length; i++)
                {
                    sourceImages[i] = new Image <Bgr, byte>(dlg.FileNames[i]);

                    using (Image <Bgr, byte> thumbnail = sourceImages[i].Resize(200, 200, Emgu.CV.CvEnum.Inter.Cubic, true))
                    {
                        DataGridViewRow row = sourceImageDataGridView.Rows[sourceImageDataGridView.Rows.Add()];
                        row.Cells["FileNameColumn"].Value  = dlg.FileNames[i];
                        row.Cells["ThumbnailColumn"].Value = thumbnail.ToBitmap();
                        row.Height = 200;
                    }
                }
                try
                {
                    //only use GPU if you have build the native binary from code and enabled "NON_FREE"
                    using (Stitcher stitcher = new Stitcher(false))
                        using (AKAZEFeaturesFinder finder = new AKAZEFeaturesFinder())
                        {
                            stitcher.SetFeaturesFinder(finder);
                            using (VectorOfMat vm = new VectorOfMat())
                            {
                                Mat result = new Mat();
                                vm.Push(sourceImages);
                                Stitcher.Status stitchStatus = stitcher.Stitch(vm, result);
                                if (stitchStatus == Stitcher.Status.Ok)
                                {
                                    resultImageBox.Image = result;
                                }
                                else
                                {
                                    MessageBox.Show(this, String.Format("Stiching Error: {0}", stitchStatus));
                                    resultImageBox.Image = null;
                                }
                            }
                        }
                }
                finally
                {
                    foreach (Image <Bgr, Byte> img in sourceImages)
                    {
                        img.Dispose();
                    }
                }
            }
        }
Example #21
        private void selectImagesButton_Click(object sender, EventArgs e)
        {
            OpenFileDialog dlg = new OpenFileDialog();

            dlg.CheckFileExists = true;
            dlg.Multiselect     = true;

            if (dlg.ShowDialog() == System.Windows.Forms.DialogResult.OK)
            {
                sourceImageDataGridView.Rows.Clear();

                Image <Bgr, Byte>[] sourceImages = new Image <Bgr, byte> [dlg.FileNames.Length];

                for (int i = 0; i < sourceImages.Length; i++)
                {
                    sourceImages[i] = new Image <Bgr, byte>(dlg.FileNames[i]);

                    using (Image <Bgr, byte> thumbnail = sourceImages[i].Resize(200, 200, Emgu.CV.CvEnum.Inter.Cubic, true))
                    {
                        DataGridViewRow row = sourceImageDataGridView.Rows[sourceImageDataGridView.Rows.Add()];
                        row.Cells["FileNameColumn"].Value  = dlg.FileNames[i];
                        row.Cells["ThumbnailColumn"].Value = thumbnail.ToBitmap();
                        row.Height = 200;
                    }
                }
                try
                {
                    //using (Stitcher stitcher = new Stitcher(true))
                    //CUDA bruteforce matcher seems to cause issue in this release, not using CUDA for matching for this reason
                    using (Stitcher stitcher = new Stitcher(false))
                    {
                        using (VectorOfMat vm = new VectorOfMat())
                        {
                            Mat result = new Mat();
                            vm.Push(sourceImages);
                            Stitcher.Status stitchStatus = stitcher.Stitch(vm, result);
                            if (stitchStatus == Stitcher.Status.Ok)
                            {
                                resultImageBox.Image = result;
                            }
                            else
                            {
                                MessageBox.Show(this, String.Format("Stiching Error: {0}", stitchStatus));
                                resultImageBox.Image = null;
                            }
                        }
                    }
                }
                finally
                {
                    foreach (Image <Bgr, Byte> img in sourceImages)
                    {
                        img.Dispose();
                    }
                }
            }
        }
Example #22
 /// <summary>
 /// Train the face recognizer with the specific images and labels
 /// </summary>
 /// <param name="images">The images used in the training.</param>
 /// <param name="labels">The labels of the images.</param>
 public void Train(Mat[] images, int[] labels)
 {
     using (VectorOfMat imgVec = new VectorOfMat())
         using (VectorOfInt labelVec = new VectorOfInt(labels))
         {
             imgVec.Push(images);
             Train(imgVec, labelVec);
         }
 }
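A minimal usage sketch for the Train overload above (recognizer type, file names and labels are hypothetical):

     //Train an EigenFaceRecognizer on two grayscale face images with distinct labels
     Mat[] faces =
     {
         new Image <Gray, byte>("person1.png").Mat,
         new Image <Gray, byte>("person2.png").Mat
     };
     int[] labels = { 1, 2 };
     FaceRecognizer recognizer = new EigenFaceRecognizer(0, double.MaxValue);
     recognizer.Train(faces, labels);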
Example #23
 /// <summary>
 /// Update the face recognizer with the specific images and labels
 /// </summary>
 /// <param name="images">The images used for updating the face recognizer</param>
 /// <param name="labels">The labels of the images</param>
 public void Update(Mat[] images, int[] labels)
 {
     Debug.Assert(images.Length == labels.Length, "The number of labels must equals the number of images");
     using (VectorOfMat imgVec = new VectorOfMat())
         using (VectorOfInt labelVec = new VectorOfInt(labels))
         {
             imgVec.Push(images);
             Update(imgVec, labelVec);
         }
 }
Example #24
 /// <summary>
 /// Train the face recognizer with the specific images and labels
 /// </summary>
 /// <param name="images">The images used in the training.</param>
 /// <param name="labels">The labels of the images.</param>
 public void Train <TColor, TDepth>(Image <TColor, TDepth>[] images, int[] labels)
     where TColor : struct, IColor
     where TDepth : new()
 {
     using (VectorOfMat imgVec = new VectorOfMat())
         using (VectorOfInt labelVec = new VectorOfInt(labels))
         {
             imgVec.Push <TDepth>(images);
             Train(imgVec, labelVec);
         }
 }
Example #25
        // Equalization
        private void equalization_Click(object sender, EventArgs e)
        {
            maskDraw.Enabled = true;
            Image <Ycc, Byte> temp = new Image <Ycc, Byte>(img.Width, img.Height);

            CvInvoke.CvtColor(img, temp, ColorConversion.Rgb2YCrCb);
            Image <Gray, Byte>[] channels = temp.Split();

            channels[0]._EqualizeHist();
            VectorOfMat c = new VectorOfMat();

            c.Push(channels[0]);
            c.Push(channels[1]);
            c.Push(channels[2]);
            CvInvoke.Merge(c, temp);
            CvInvoke.CvtColor(temp, img, ColorConversion.YCrCb2Rgb);
            imgBackUp        = img.Clone();
            mainImage2.Image = img.ToBitmap();
            label1.Text      = "Status: Histogram equalization";
        }
Example #26
 public void TestVectorOfVectorOfMat()
 {
     using (Mat m = new Mat())
         using (VectorOfMat vm = new VectorOfMat())
         {
             vm.Push(m);
             using (VectorOfVectorOfMat vvm = new VectorOfVectorOfMat())
             {
                 vvm.Push(vm);
             }
         }
 }
Example #27
 /// <summary>
 /// Update the face recognizer with the specific images and labels
 /// </summary>
 /// <param name="images">The images used for updating the face recognizer</param>
 /// <param name="labels">The labels of the images</param>
 public void Update <TColor, TDepth>(Image <TColor, TDepth>[] images, int[] labels)
     where TColor : struct, IColor
     where TDepth : new()
 {
     Debug.Assert(images.Length == labels.Length, "The number of labels must equals the number of images");
     using (VectorOfMat imgVec = new VectorOfMat())
         using (VectorOfInt labelVec = new VectorOfInt(labels))
         {
             imgVec.Push(images);
             Update(imgVec, labelVec);
         }
 }
Example #28
        private void button5_Click(object sender, EventArgs e)
        {
            Stitcher    _sticher     = new Stitcher(Stitcher.Mode.Scans); //Create a Stitcher instance.
            Mat         result_image = new Mat();                         //Mat that will hold the output panorama
            VectorOfMat sti_image    = new VectorOfMat();                 //VectorOfMat holding the input images to stitch

            // Add the images to sti_image; they are not pushed in any particular order, so stitching does not depend on image order
            sti_image.Push(image1);
            sti_image.Push(image2);
            sti_image.Push(image3);
            sti_image.Push(image4);
            Stitcher.Status status = _sticher.Stitch(sti_image, result_image); //Stitch the images; the returned Stitcher.Status indicates whether stitching succeeded.
            if (status == Stitcher.Status.Ok)
            {
                imageBox5.Image = result_image; //Display the result.
            }
            else
            {
                MessageBox.Show("拼接失败", "提示"); // "Stitching failed", "Notice"
            }
        }
Example #29
        ////////////////////////////////////////////////////////////////////////////////////////////

        private void btnOpenFile_Click(object sender, EventArgs e)
        {
            OpenFileDialog ofdOpenfilesD = new OpenFileDialog();

            ofdOpenfilesD.CheckFileExists = true;
            ofdOpenfilesD.Multiselect     = true;

            if (ofdOpenfilesD.ShowDialog() != DialogResult.OK || ofdOpenfilesD.FileName == "")
            {
                MessageBox.Show("Can not read image");
            }
            else // the dialog has already been shown once above; do not show it a second time
            {
                dgvSourceImage.Rows.Clear();


                Image <Bgr, Byte>[] originalImagesU = new Image <Bgr, byte> [ofdOpenfilesD.FileNames.Length];

                for (int i = 0; i < originalImagesU.Length; i++)
                {
                    originalImagesU[i] = new Image <Bgr, byte>(ofdOpenfilesD.FileNames[i]);

                    using (Image <Bgr, byte> firstImage = originalImagesU[i].Resize(200, 200, Inter.Cubic, true))

                    {
                        DataGridViewRow row = dgvSourceImage.Rows[dgvSourceImage.Rows.Add()];
                        row.Cells["fileNameColumn"].Value      = ofdOpenfilesD.FileNames[i];
                        row.Cells["samplePictureColumn"].Value = firstImage.ToBitmap();
                        row.Height = 200;
                    }
                }
                try
                {
                    using (Stitcher stiching = new Stitcher(false))
                    {
                        using (VectorOfMat matVector = new VectorOfMat())
                        {
                            Mat finalImageN = new Mat();
                            matVector.Push(originalImagesU);
                            stiching.Stitch(matVector, finalImageN);
                            ibFinalImage.Image = finalImageN;
                        }
                    }
                }
                finally
                {
                    foreach (Image <Bgr, Byte> image in originalImagesU)
                    {
                        image.Dispose();
                    }
                }
            }
        }
Example #30
      private void selectImagesButton_Click(object sender, EventArgs e)
      {
         OpenFileDialog dlg = new OpenFileDialog();
         dlg.CheckFileExists = true;
         dlg.Multiselect = true;

         if (dlg.ShowDialog() == System.Windows.Forms.DialogResult.OK)
         {
            sourceImageDataGridView.Rows.Clear();

            Image<Bgr, byte>[] sourceImages = new Image<Bgr, byte>[dlg.FileNames.Length];
            
            for (int i = 0; i < sourceImages.Length; i++)
            {
               sourceImages[i] = new Image<Bgr, byte>(dlg.FileNames[i]);

               using (Image<Bgr, byte> thumbnail = sourceImages[i].Resize(200, 200, Emgu.CV.CvEnum.Inter.Cubic, true))
               {
                  DataGridViewRow row = sourceImageDataGridView.Rows[sourceImageDataGridView.Rows.Add()];
                  row.Cells["FileNameColumn"].Value = dlg.FileNames[i];
                  row.Cells["ThumbnailColumn"].Value = thumbnail.ToBitmap();
                  row.Height = 200;
               }
            }
            try
            {
               using (Stitcher stitcher = new Stitcher(true))
               {
                  using (VectorOfMat vm = new VectorOfMat())
                  {
                     Mat result = new Mat();
                     vm.Push(sourceImages);
                     Stitcher.Status stitchStatus = stitcher.Stitch(vm, result);
                     if (stitchStatus == Stitcher.Status.Ok)
                        resultImageBox.Image = result;
                     else
                     {
                        MessageBox.Show(this, String.Format("Stiching Error: {0}", stitchStatus));
                        resultImageBox.Image = null;
                     }
                  }
               }
            }
            finally
            {
               foreach (Image<Bgr, Byte> img in sourceImages)
               {
                  img.Dispose();
               }
            }
         }
      }
Example #31
        private void button_hist_Click(object sender, EventArgs e)
        {
            if (Image_Target == null || Image_Texture == null)
            {
                return;
            }
            double             temp_weight                  = (double)trackBar_hist.Value / 10.0;
            Image <Gray, Byte> target_gray                  = Image_Target.Clone().Convert <Gray, Byte>();
            Image <Gray, Byte> texture_gray                 = Image_Texture.Clone().Convert <Gray, Byte>();
            Image <Gray, Byte> target_hist_matched          = new Image <Gray, Byte>(Image_Target.Size);
            Image <Gray, Byte> target_hist_matched_weighted = new Image <Gray, Byte>(Image_Texture.Size);

            Matrix <byte> histLUT      = new Matrix <byte>(1, 256);
            Mat           hist_target  = new Mat();
            Mat           hist_texture = new Mat();

            VectorOfMat vm_target  = new VectorOfMat();
            VectorOfMat vm_texture = new VectorOfMat();

            vm_target.Push(target_gray);
            vm_texture.Push(texture_gray);

            CvInvoke.CalcHist(vm_target, new int[] { 0 }, null, hist_target, new int[] { 256 }, new float[] { 0, 255 }, false);
            CvInvoke.CalcHist(vm_texture, new int[] { 0 }, null, hist_texture, new int[] { 256 }, new float[] { 0, 255 }, false);

            float[] CDF_hist_target  = new float[256];
            float[] CDF_hist_texture = new float[256];
            Marshal.Copy(hist_target.DataPointer, CDF_hist_target, 0, 256);
            Marshal.Copy(hist_texture.DataPointer, CDF_hist_texture, 0, 256);
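            // Turn both histograms into cumulative distribution functions, then build a LUT that maps
            // each target gray level to the first texture level whose CDF is at least as large (histogram matching).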

            for (int i = 1; i < 256; i++)
            {
                CDF_hist_target[i]  += CDF_hist_target[i - 1];
                CDF_hist_texture[i] += CDF_hist_texture[i - 1];
            }

            for (int i = 0; i < 256; i++)
            {
                histLUT.Data[0, i] = 0;
                for (int j = 0; j < 256; j++)
                {
                    if (CDF_hist_texture[j] >= CDF_hist_target[i])
                    {
                        histLUT.Data[0, i] = (byte)j;
                        break;
                    }
                }
            }
            CvInvoke.LUT(target_gray, histLUT, target_hist_matched);
            target_hist_matched_weighted = target_hist_matched * temp_weight + target_gray * (1.0 - temp_weight);
            imageBox_hist.Image          = target_hist_matched_weighted;
        }
Example #32
        public Image <Bgr, byte> editColourChanel(Image <Bgr, byte> sourceImage, CheckedListBox checkedListBox)
        {
            var channel = sourceImage.Split()[checkedListBox.SelectedIndex];

            Image <Bgr, byte> destImage = sourceImage.CopyBlank();

            VectorOfMat vm = new VectorOfMat();

            for (int i = 0; i < 3; i++)
            {
                if (i == checkedListBox.SelectedIndex)
                {
                    vm.Push(channel);
                }
                else
                {
                    vm.Push(channel.CopyBlank());
                }
            }
            CvInvoke.Merge(vm, destImage);
            return(destImage);
        }
Example #33
        public async Task<Stream> StitchImages(List<string> imageUrls)
        {
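            // Download every image, wrap it as an Emgu CV image, stitch the set into one panorama
            // and return the result as a JPEG stream.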
            if (imageUrls == null || !imageUrls.Any())
            {
                return null;
            }

            var httpClient = new HttpClient();
            var imageStreams = new List<Stream>();

            foreach (var imageUrl in imageUrls)
            {
                var imageStream = await httpClient.GetStreamAsync(imageUrl);
                imageStreams.Add(imageStream);
            }

            var imageBitmaps = new List<Bitmap>();
            foreach (var imageStream in imageStreams)
            {
                var imageBitmap = new Bitmap(imageStream);
                imageBitmaps.Add(imageBitmap);
            }

            var emguImages = new List<Image<Bgr, byte>>();
            foreach (var imageBitmap in imageBitmaps)
            {
                var image = new Image<Bgr, byte>(imageBitmap);
                emguImages.Add(image);
            }

            var arr = new VectorOfMat();
            foreach (var emguImage in emguImages)
            {
                arr.Push(emguImage.Mat);
            }

            var stitchedImage = new Mat();

            using (var stitcher = new Stitcher(false))
            {
                stitcher.Stitch(arr, stitchedImage);
            }

            var resultMemStream = new MemoryStream();

            stitchedImage.Bitmap.Save(resultMemStream, ImageFormat.Jpeg);
            resultMemStream.Position = 0;

            return resultMemStream;
        }
Example #34
      private void selectImagesButton_Click(object sender, EventArgs e)
      {
         OpenFileDialog dlg = new OpenFileDialog();
         dlg.CheckFileExists = true;
         dlg.Multiselect = true;

         if (dlg.ShowDialog() == System.Windows.Forms.DialogResult.OK)
         {
            sourceImageDataGridView.Rows.Clear();

            Image<Bgr, Byte>[] sourceImages = new Image<Bgr, byte>[dlg.FileNames.Length];
            
            for (int i = 0; i < sourceImages.Length; i++)
            {
               sourceImages[i] = new Image<Bgr, byte>(dlg.FileNames[i]);

               using (Image<Bgr, byte> thumbnail = sourceImages[i].Resize(200, 200, Emgu.CV.CvEnum.Inter.Cubic, true))
               {
                  DataGridViewRow row = sourceImageDataGridView.Rows[sourceImageDataGridView.Rows.Add()];
                  row.Cells["FileNameColumn"].Value = dlg.FileNames[i];
                  row.Cells["ThumbnailColumn"].Value = thumbnail.ToBitmap();
                  row.Height = 200;
               }
            }
            try
            {
               //using (Stitcher stitcher = new Stitcher(true))
               //CUDA bruteforce matcher seems to cause issue in this release, not using CUDA for matching for this reason
               using (Stitcher stitcher = new Stitcher(false))
               {
                  using (VectorOfMat vm = new VectorOfMat())
                  {
                     Mat result = new Mat();
                     vm.Push(sourceImages);
                     stitcher.Stitch(vm, result);
                     resultImageBox.Image = result;
                  }
               }
            }
            finally
            {
               foreach (Image<Bgr, Byte> img in sourceImages)
               {
                  img.Dispose();
               }
            }
         }
      }
Example #35
        public async Task<HttpResponseMessage> GetPanoDemo()
        {
            try
            {
                var imgUrl1 = @"https://cs.brown.edu/courses/csci1950-g/results/proj6/edwallac/source001_01.jpg";
                var imgUrl2 = @"https://cs.brown.edu/courses/csci1950-g/results/proj6/edwallac/source001_02.jpg";

                var img1Stream = await(new HttpClient()).GetStreamAsync(imgUrl1);
                var img2Stream = await(new HttpClient()).GetStreamAsync(imgUrl2);

                var bitmap1 = new Bitmap(img1Stream);
                var bitmap2 = new Bitmap(img2Stream);

                var img1 = new Image<Bgr, byte>(bitmap1);
                var img2 = new Image<Bgr, byte>(bitmap2);

                var arr = new VectorOfMat();
                arr.Push(new[] { img1, img2 });

                var stitchedImage = new Mat();

                using (var stitcher = new Stitcher(false))
                {
                    stitcher.Stitch(arr, stitchedImage);
                }

                var resultMemStream = new MemoryStream();

                stitchedImage.Bitmap.Save(resultMemStream, ImageFormat.Jpeg);
                resultMemStream.Position = 0;

                var responseMessage = new HttpResponseMessage
                {
                    Content = new StreamContent(resultMemStream)
                    {
                        Headers =
                        {
                            ContentLength = resultMemStream.Length,
                            ContentType = new MediaTypeHeaderValue("image/jpeg"),
                            ContentDisposition = new ContentDispositionHeaderValue("attachment")
                            {
                                FileName = HttpUtility.UrlDecode("result.jpg"),
                                Size = resultMemStream.Length
                            }
                        }
                    }
                };

                return responseMessage;
            }
            catch (Exception e)
            {
                return new HttpResponseMessage(HttpStatusCode.InternalServerError)
                {
                    ReasonPhrase = e.Message
                };
            }
        }
Example #36
      public void TestStitching4()
      {
         Mat[] images = new Mat[1];

         images[0] = EmguAssert.LoadMat("stitch1.jpg");
         

         using (Stitcher stitcher = new Stitcher(false))
         //using (OrbFeaturesFinder finder = new OrbFeaturesFinder(new Size(3, 1)))
         {
            //stitcher.SetFeaturesFinder(finder);
            Mat result = new Mat();
            using (VectorOfMat vm = new VectorOfMat())
            {
               vm.Push(images);
               stitcher.Stitch(vm, result);
            }
            //Emgu.CV.UI.ImageViewer.Show(result);
         }
      }
Example #37
      public void TestHistogram()
      {
         using (Image<Bgr, Byte> img = EmguAssert.LoadImage<Bgr, Byte>("stuff.jpg"))
         using (Image<Hsv, Byte> img2 = img.Convert<Hsv, Byte>())
         {
            Image<Gray, Byte>[] HSVs = img2.Split();

            using (Mat h = new Mat())
            using (Mat bpj = new Mat())
            using (VectorOfMat vm = new VectorOfMat())
            {
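               // Build a 20-bin hue histogram and back-project it onto the hue channel.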
               vm.Push(HSVs[0]);
               CvInvoke.CalcHist(vm, new int[] { 0 }, null, h, new int[] { 20 }, new float[] { 0, 180 }, false);
               CvInvoke.CalcBackProject(vm, new int[] { 0 }, h, bpj, new float[] { 0, 180 }, 0.1);

               //Emgu.CV.UI.HistogramViewer.Show(bpj);
               //Emgu.CV.UI.ImageViewer.Show(bpj);
               //h.Calculate(new Image<Gray, Byte>[1] { HSVs[0] }, true, null);
               //using (Image<Gray, Byte> bpj = h.BackProject(new Image<Gray, Byte>[1] { HSVs[0] }))
               //{
               //   Size sz = bpj.Size;
               //}

               //using (Image<Gray, Single> patchBpj = h.BackProjectPatch(
               //   new Image<Gray, Byte>[1] { HSVs[0] },
               //   new Size(5, 5),
               //   Emgu.CV.CvEnum.HISTOGRAM_COMP_METHOD.CV_COMP_CHISQR,
               //   1.0))
               //{
               //   Size sz = patchBpj.Size;
               //}
            }

            foreach (Image<Gray, Byte> i in HSVs)
               i.Dispose();
         }
      }
Example #38
      public void TestStitching2()
      {
         Image<Bgr, Byte>[] images = new Image<Bgr, byte>[4];

         images[0] = EmguAssert.LoadImage<Bgr, Byte>("stitch1.jpg");
         images[1] = EmguAssert.LoadImage<Bgr, Byte>("stitch2.jpg");
         images[2] = EmguAssert.LoadImage<Bgr, Byte>("stitch3.jpg");
         images[3] = EmguAssert.LoadImage<Bgr, Byte>("stitch4.jpg");

         using (Stitcher stitcher = new Stitcher(false))
         using (OrbFeaturesFinder finder = new OrbFeaturesFinder(new Size(3, 1)))
         {
            stitcher.SetFeaturesFinder(finder);
            Mat result = new Mat();
            using (VectorOfMat vm = new VectorOfMat())
            {
               vm.Push(images);
               stitcher.Stitch(vm, result);
            }
            //Emgu.CV.UI.ImageViewer.Show(result);
         }
      }
Example #39
        public static bool Sticher(IEnumerable<string> fileList, string saveFileLocation)
        {
            // Materialize the query up front so the very same Image instances are stitched and later disposed.
            var imageArray = (from fileName in fileList
                              select new Image<Bgr, byte>(fileName)).ToArray();

            try
            {

                using (var stitcher = new Stitcher(false))
                {
                    using (var vm = new VectorOfMat())
                    {
                        var result = new Mat();
                        vm.Push(imageArray);
                        stitcher.Stitch(vm, result);
                        result.Save(saveFileLocation);
                    }
                }

                return true;
            }
            catch (Exception ex)
            {
                Logger.Error("Failed to stich !!", ex);
                return false;
            }
            finally
            {
                foreach (Image<Bgr, Byte> img in imageArray)
                {
                    img.Dispose();
                }
            }
        }