Example #1
 public static Emgu.CV.Mat BitmapToEmguMat(System.Drawing.Bitmap bitmap)
 {
     Emgu.CV.Mat mat = null;
     #region File read/write approach
     //lock (locker)
     //{
     //    DateTime time = System.DateTime.Now;
     //    String tempFilePath = String.Format(@"tempBitmapToEmguMat_{0}_{1}.bmp", time.Minute, time.Second);
     //    using (System.IO.FileStream fs = System.IO.File.Create(tempFilePath))
     //    {
     //        bitmap.Save(fs, System.Drawing.Imaging.ImageFormat.Bmp);
     //        bitmap.Dispose();
     //    }
     //    mat = new Emgu.CV.Mat(tempFilePath, Emgu.CV.CvEnum.LoadImageType.AnyColor);
     //    if (System.IO.File.Exists(tempFilePath))
     //    {
     //        System.IO.File.Delete(tempFilePath);
     //    }
     //}
     using (OpenCvSharp.Mat image = bitmap.ToMat())
     {
         mat = MatOpenCVSharpToEmgu(image);
     }
     #endregion
     return(mat);
 }
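A short usage sketch for the helper above; the file path is an illustrative assumption, not part of the original example.

 // Hypothetical usage: load a Bitmap from disk, convert it, and display it.
 using (var bitmap = new System.Drawing.Bitmap(@"input.bmp")) // assumed sample file
 {
     Emgu.CV.Mat mat = BitmapToEmguMat(bitmap);
     Emgu.CV.CvInvoke.Imshow("converted", mat);
     Emgu.CV.CvInvoke.WaitKey(0);
 }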
Example #2
 private void ComboBox_ImageConvert_SelectedIndexChanged(object sender, EventArgs e)
 {
     m_ProcessedImage = m_OriginalImage;
     if (ComboBox_ImageConvert.SelectedItem.ToString().CompareTo("Color") == 0)
     {
         m_ProcessedImage = LibUtility.ClassUtility.ConvertImageColor(m_ProcessedImage, (int)LibUtility.ClassUtility.E_ImageColor.e_Color);
     }
     if (ComboBox_ImageConvert.SelectedItem.ToString().CompareTo("R") == 0)
     {
         m_ProcessedImage = LibUtility.ClassUtility.ConvertImageColor(m_ProcessedImage, (int)LibUtility.ClassUtility.E_ImageColor.e_R);
     }
     if (ComboBox_ImageConvert.SelectedItem.ToString().CompareTo("G") == 0)
     {
         m_ProcessedImage = LibUtility.ClassUtility.ConvertImageColor(m_ProcessedImage, (int)LibUtility.ClassUtility.E_ImageColor.e_G);
     }
     if (ComboBox_ImageConvert.SelectedItem.ToString().CompareTo("B") == 0)
     {
         m_ProcessedImage = LibUtility.ClassUtility.ConvertImageColor(m_ProcessedImage, (int)LibUtility.ClassUtility.E_ImageColor.e_B);
     }
     if (ComboBox_ImageConvert.SelectedItem.ToString().CompareTo("Gray") == 0)
     {
         m_ProcessedImage = LibUtility.ClassUtility.ConvertImageColor(m_ProcessedImage, (int)LibUtility.ClassUtility.E_ImageColor.e_Gray);
     }
     ZoomPanROIPictureBox_ProcessedImage.Image = m_ProcessedImage.Bitmap;
 }
Example #3
        static Dictionary <string, Emgu.CV.Mat> loadDescriptors(string archivename)
        {
            var result = new Dictionary <string, Emgu.CV.Mat>();

            using (var fstream = new FileStream(archivename, FileMode.Open))
                using (var archive = new ZipArchive(fstream, ZipArchiveMode.Read)) {
                    foreach (var file in archive.Entries)
                    {
                        using (var stream = file.Open())
                            using (var reader = new BinaryReader(stream)) {
                                var bytes = reader.ReadBytes((int)file.Length);
                                var des   = new Emgu.CV.Mat(bytes.Length / 4 / 128, 128, Emgu.CV.CvEnum.DepthType.Cv32F, 1);
                                unsafe
                                {
                                    fixed(byte *ptr = bytes)
                                    {
                                        Buffer.MemoryCopy(ptr, des.DataPointer.ToPointer(), bytes.Length, bytes.Length);
                                    }
                                }
                                result[file.Name] = des;
                            }
                    }
                }
            return(result);
        }
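A minimal usage sketch, assuming the zip archive was produced by the CreateDescriptions helper shown later (Example #19): each entry maps a name to an N x 128 CV_32F matrix of SIFT descriptors.

        // Hypothetical usage: "portraits.zip" is assumed to exist next to the executable.
        var descriptors = loadDescriptors("portraits.zip");
        foreach (var kvp in descriptors)
        {
            Console.WriteLine("{0}: {1} descriptors of length {2}", kvp.Key, kvp.Value.Rows, kvp.Value.Cols);
        }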
Example #4
        public static Matrix <double> ToMathNetMatrix(this Emgu.CV.Mat mat, bool isAppend = false)
        {
            Emgu.CV.Matrix <double> matrix = new Emgu.CV.Matrix <double>(mat.Rows, mat.Cols);
            mat.CopyTo(matrix);

            double[,] array;
            if (isAppend)
            {
                array = new double[mat.Rows + 1, mat.Cols];
            }
            else
            {
                array = new double[mat.Rows, mat.Cols];
            }

            for (int i = 0; i < mat.Rows; i++)
            {
                for (int j = 0; j < mat.Cols; j++)
                {
                    array[i, j] = matrix[i, j];
                }
            }

            if (isAppend)
            {
                for (int j = 0; j < mat.Cols - 1; j++)
                {
                    array[mat.Rows, j] = 0;
                }
                array[mat.Rows, mat.Cols - 1] = 1;
            }

            return(DenseMatrix.OfArray(array));
        }
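A usage sketch for the extension above (the 2x3 input matrix is an assumption): with isAppend: true a trailing [0, ..., 0, 1] row is added, which can be convenient when promoting a 2x3 affine matrix to a full 3x3 homogeneous transform.

        // Hypothetical usage: promote a 2x3 affine transform (CV_64F) to a 3x3 homogeneous matrix.
        Emgu.CV.Mat affine = new Emgu.CV.Mat(2, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
        affine.SetTo(new Emgu.CV.Structure.MCvScalar(0)); // placeholder content
        MathNet.Numerics.LinearAlgebra.Matrix<double> asIs = affine.ToMathNetMatrix();                      // 2x3 copy
        MathNet.Numerics.LinearAlgebra.Matrix<double> homogeneous = affine.ToMathNetMatrix(isAppend: true); // 3x3 with [0, 0, 1] appended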
Example #5
        private void Calibrate()
        {
            if (m_skeletonCalibPoints.Count == m_calibPoints.Count)
            {
                //skeleton 3D positions --> 3D positions in depth camera
                Point3D p0 = convertSkeletonPointToDepthPoint(m_skeletonCalibPoints[0]);
                Point3D p1 = convertSkeletonPointToDepthPoint(m_skeletonCalibPoints[1]);
                Point3D p2 = convertSkeletonPointToDepthPoint(m_skeletonCalibPoints[2]);
                Point3D p3 = convertSkeletonPointToDepthPoint(m_skeletonCalibPoints[3]);

                //3d positions depth camera --> positions on a 2D plane
                Vector3D v1 = p1 - p0;
                v1.Normalize();

                Vector3D v2 = p2 - p0;
                v2.Normalize();

                Vector3D planeNormalVec = Vector3D.CrossProduct(v1, v2);
                planeNormalVec.Normalize();

                Vector3D resultingPlaneNormal = new Vector3D(0, 0, 1);
                m_groundPlaneTransform = Util.make_align_axis_matrix(resultingPlaneNormal, planeNormalVec);

                Point3D p0OnPlane = m_groundPlaneTransform.Transform(p0);
                Point3D p1OnPlane = m_groundPlaneTransform.Transform(p1);
                Point3D p2OnPlane = m_groundPlaneTransform.Transform(p2);
                Point3D p3OnPlane = m_groundPlaneTransform.Transform(p3);

                //2d plane positions --> exact 2d square on screen (using perspective transform)
                System.Drawing.PointF[] src = new System.Drawing.PointF[4];
                src[0] = new System.Drawing.PointF((float)p0OnPlane.X, (float)p0OnPlane.Y);
                src[1] = new System.Drawing.PointF((float)p1OnPlane.X, (float)p1OnPlane.Y);
                src[2] = new System.Drawing.PointF((float)p2OnPlane.X, (float)p2OnPlane.Y);
                src[3] = new System.Drawing.PointF((float)p3OnPlane.X, (float)p3OnPlane.Y);

                System.Drawing.PointF[] dest = new System.Drawing.PointF[4];
                dest[0] = new System.Drawing.PointF((float)m_calibPoints[0].X, (float)m_calibPoints[0].Y);
                dest[1] = new System.Drawing.PointF((float)m_calibPoints[1].X, (float)m_calibPoints[1].Y);
                dest[2] = new System.Drawing.PointF((float)m_calibPoints[2].X, (float)m_calibPoints[2].Y);
                dest[3] = new System.Drawing.PointF((float)m_calibPoints[3].X, (float)m_calibPoints[3].Y);

                Emgu.CV.Mat transform = Emgu.CV.CvInvoke.GetPerspectiveTransform(src, dest);

                m_transform = new Emgu.CV.Matrix <double>(transform.Rows, transform.Cols, transform.NumberOfChannels);
                transform.CopyTo(m_transform);

                //test to see if resulting perspective transform is correct
                //tResultx should be same as points in m_calibPoints
                Point tResult0 = kinectToProjectionPoint(m_skeletonCalibPoints[0]);
                Point tResult1 = kinectToProjectionPoint(m_skeletonCalibPoints[1]);
                Point tResult2 = kinectToProjectionPoint(m_skeletonCalibPoints[2]);
                Point tResult3 = kinectToProjectionPoint(m_skeletonCalibPoints[3]);

                txtCalib.Text = tResult0.ToString(CultureInfo.InvariantCulture) + ";\n" +
                                tResult1.ToString(CultureInfo.InvariantCulture) + ";\n" +
                                tResult2.ToString(CultureInfo.InvariantCulture) + ";\n" +
                                tResult3.ToString(CultureInfo.InvariantCulture);
            }
        }
Example #6
 public static int RecognizeImg(
     out CarDataStruct answer,
     Emgu.CV.Mat img,
     string fileName)
 {
     return(CarModelRecognize.RecognizeImgCpp(
                out answer,
                img,
                fileName,
                -1));
 }
Example #7
        /// <summary>
        /// Finds a barcode in an image. Returns a bounding box.
        /// </summary>
        /// <param name="file"></param>
        /// <returns></returns>
        public static System.Drawing.Rectangle FindBarcodeBoundingBox(System.IO.FileInfo file)
        {
            if (file is null)
            {
                throw new System.ArgumentNullException(nameof(file));
            }

            using (var sourceMat = new Emgu.CV.Mat(fileName: file.FullName))
            {
                return(sourceMat.GetBarcodeBoundingBox());
            }
        }
Example #8
        public static OpenCvSharp.Mat MatEmguToOpenCVSharp(Emgu.CV.Mat emguMat)
        {
            #region Reserved: bitmap conversion (may lose some information)
            //var opcvsMat = OpenCvSharp.Extensions.BitmapConverter.ToMat(emguMat.Bitmap);
            //return opcvsMat;
            #endregion

            #region In use: Emgu pointer, new OpenCvSharp.Mat(IntPtr)
            var ptrMat = new OpenCvSharp.Mat(emguMat.Ptr);
            return(ptrMat);

            #endregion
        }
Example #9
 public DialDefinition(Emgu.CV.Mat dial)
 {
     InitializeComponent();
     this.parent = (MainWindow)Application.Current.MainWindow;
     if (dial != null)
     {
         dialImage.Source = Utils.ToBitmapSource(dial);
     }
     else
     {
         dialImage.Source = null;
     }
 }
Example #10
        public static Emgu.CV.Mat  ConvertImageColor(Emgu.CV.Mat f_Image, int ConvertTo)
        {
            Emgu.CV.Mat t_ResultImage = new Emgu.CV.Mat();
            t_ResultImage = f_Image;
            if (f_Image.NumberOfChannels < 3)
            {
                return(t_ResultImage);
            }
            switch (ConvertTo)
            {
            case (int)E_ImageColor.e_Color:
            {
                t_ResultImage = f_Image;
                break;
            }

            case (int)E_ImageColor.e_R:
            {
                Emgu.CV.Mat[] t_ImageSplit = f_Image.Split();
                t_ResultImage = t_ImageSplit[2];
                break;
            }

            case (int)E_ImageColor.e_G:
            {
                Emgu.CV.Mat[] t_ImageSplit = f_Image.Split();
                t_ResultImage = t_ImageSplit[1];
                break;
            }

            case (int)E_ImageColor.e_B:
            {
                Emgu.CV.Mat[] t_ImageSplit = f_Image.Split();
                t_ResultImage = t_ImageSplit[0];
                break;
            }

            case (int)E_ImageColor.e_Gray:
            {
                Emgu.CV.CvInvoke.CvtColor(f_Image, t_ResultImage, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                break;
            }

            default:
            {
                t_ResultImage = f_Image;
                break;
            }
            }
            return(t_ResultImage);
        }
Example #11
        private void Button_OpenImage_Click(object sender, EventArgs e)
        {
            string t_ImagePath = LibUtility.ClassUtility.OpenImageFile();

            m_OriginalImage  = Emgu.CV.CvInvoke.Imread(t_ImagePath, Emgu.CV.CvEnum.ImreadModes.Unchanged);
            m_ProcessedImage = m_OriginalImage.Clone();
            //Emgu.CV.Mat ttImage;
            //ttImage = LibUtility.ClassUtility.ConvertImageColor(m_ProcessedImage, 2);
            ZoomPanROIPictureBox_OriginalImage.Image  = m_OriginalImage.Bitmap;
            ZoomPanROIPictureBox_ProcessedImage.Image = m_ProcessedImage.Bitmap;

            ZoomPanROIPictureBox_OriginalImage.FitImageToCenter();
            ZoomPanROIPictureBox_ProcessedImage.FitImageToCenter();
        }
Example #12
        /// <summary>
        /// Finds a barcode in an image. Returns a bounding box.
        /// </summary>
        /// <param name="sourceMat"></param>
        /// <returns>bounding box</returns>
        public static System.Drawing.Rectangle GetBarcodeBoundingBox(this Emgu.CV.Mat sourceMat)
        {
            if (sourceMat is null)
            {
                throw new System.ArgumentNullException(nameof(sourceMat));
            }

            return(sourceMat.GetGrayscale()
                   .GetGradient()
                   .SimplifyImage()
                   .GetLargestFeature()
                   .GetBoundingBox(maxWidth: sourceMat.Width,
                                   maxHeight: sourceMat.Height));
        }
Example #13
            private void Packet()
            {
                AudioFrame dblpPacketFrame = new AudioFrame(2, 1024, AVSampleFormat.AV_SAMPLE_FMT_S64P, 44100);

                Emgu.CV.Mat cv64fH2C1 = dblpPacketFrame.ToMat(); // Cv64F, width 1024, height 2, number of channel 1
                var         data      = cv64fH2C1.GetData();

                int[] lengths = new int[data.Rank];
                for (int i = 0; i < lengths.Length; i++)
                {
                    lengths[i] = data.GetLength(i);
                }
                var output = Array.CreateInstance(typeof(long), lengths);

                Buffer.BlockCopy(data, 0, output, 0, data.Length * sizeof(long)); // output is long[2,1024]
            }
Example #14
        public Proposition(List <int> entiers, Emgu.CV.Mat mat, string txt, BoutonsCirculaires bc, List <Bouton> boutons)
        {
            System.Windows.Media.Imaging.BitmapSource img = BoutonsCirculaires.ToBitmapSource(mat);
            this.entiers = entiers;
            pUC          = new Proposition_UC();
            pUC.LINK(this);
            somme     = entiers.Sum();
            signature = SetSignature(entiers);

            pUC.wpfImage.Width  = img.Width;
            pUC.wpfImage.Source = img;
            pUC.lbl.Content     = txt;

            this.boutons = boutons;
            this.bc      = bc;
        }
Example #15
        private static void RunExample(System.IO.FileInfo file)
        {
            if (file is null)
            {
                throw new System.ArgumentNullException(nameof(file));
            }

            var outputFilepath = System.IO.Path.ChangeExtension(file.FullName, $"OUT{file.Extension}");

            using (var sourceMat = new Emgu.CV.Mat(fileName: file.FullName))
            {
                var boundingBox = InitiatorProject.Barcode.BarcodeDetector.GetBarcodeBoundingBox(sourceMat: sourceMat);

                var outputImage = sourceMat.ToImage <Emgu.CV.Structure.Bgr, System.Single>();

                outputImage.DrawBoxOutline(boundingBox: boundingBox).Save(fileName: outputFilepath);
            }
        }
Example #16
 public static Emgu.CV.Mat BitmapSourceToEmguCvMat(System.Windows.Media.Imaging.BitmapSource source)
 {
     if (source.Format == PixelFormats.Bgra32)
     {
         Emgu.CV.Mat result = new Emgu.CV.Mat();
         result.Create(source.PixelHeight, source.PixelWidth, Emgu.CV.CvEnum.DepthType.Cv8U, 4);
         source.CopyPixels(Int32Rect.Empty, result.DataPointer, result.Step * result.Rows, result.Step);
         return(result);
     }
     else if (source.Format == PixelFormats.Bgr24)
     {
         Emgu.CV.Mat result = new Emgu.CV.Mat();
         result.Create(source.PixelHeight, source.PixelWidth, Emgu.CV.CvEnum.DepthType.Cv8U, 3);
         source.CopyPixels(Int32Rect.Empty, result.DataPointer, result.Step * result.Rows, result.Step);
         return(result);
     }
     else
     {
         throw new Exception(String.Format("Conversion from BitmapSource of format {0} is not supported.", source.Format));
     }
 }
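A usage sketch for the converter above (the image URI is an assumption); converting the source to Bgra32 first avoids the unsupported-format exception for other pixel formats.

     // Hypothetical usage: normalize an arbitrary BitmapSource to Bgra32 before converting.
     var bitmapImage = new System.Windows.Media.Imaging.BitmapImage(new Uri(@"input.png", UriKind.Relative)); // assumed sample file
     var bgra32 = new System.Windows.Media.Imaging.FormatConvertedBitmap(bitmapImage, PixelFormats.Bgra32, null, 0);
     Emgu.CV.Mat mat = BitmapSourceToEmguCvMat(bgra32);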
Example #17
 public void EmguCvCaptureVideoStream(string url, Action <Emgu.CV.Mat> action)
 {
     using Emgu.CV.VideoCapture capture = new Emgu.CV.VideoCapture(url, Emgu.CV.VideoCapture.API.Any);
     if (capture.IsOpened == false)
     {
         return;
     }
     using Emgu.CV.Mat frame = capture.QueryFrame();
     while (_stopPlay == false)
     {
          if (capture.Read(frame)) //capture.Retrieve(frame) // cannot keep reading
         {
             if (!frame.IsEmpty)
             {
                 action?.Invoke(frame);
             }
         }
     }
     frame.Dispose();
     capture.Dispose();
 }
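A usage sketch for the capture loop above; the stream URL is an assumption, and the callback simply forwards each frame to the LoadImgByEmguCvMat helper shown in Example #20 (assumed to be reachable from the same class).

     // Hypothetical usage: read frames from a stream until _stopPlay is set elsewhere.
     EmguCvCaptureVideoStream("rtsp://example.local/stream", frame =>
     {
         LoadImgByEmguCvMat(frame); // push the decoded frame into the WPF image control
     });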
Example #18
        public static Emgu.CV.Mat MatOpenCVSharpToEmgu(OpenCvSharp.Mat opcvsMat)
        {
            #region convert through bytes
            var emguMat = new Emgu.CV.Mat();
            Emgu.CV.CvInvoke.Imdecode(opcvsMat.ToBytes(), ImreadModes.AnyColor, emguMat);
            return(emguMat);

            #endregion

            #region Obsolete: convert through bitmap (may lose some information)
            //System.Drawing.Bitmap bitmap = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(opcvsMat.ToIplImage());
            //var img = new Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte>(bitmap);
            //return img.Mat;
            #endregion

            #region Error: OpenCvSharp CvPtr pointer passed to Emgu CvArrToMat
            //Error message : OpenCV: Unknown array type
            //Error reason : the IntPtr parameter of CvArrToMat must be IplImage or CvMatND
            //var emguMat = Emgu.CV.CvInvoke.CvArrToMat(opcvsMat.CvPtr, true);
            //return emguMat;
            #endregion
        }
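A round-trip sketch combining this converter with MatEmguToOpenCVSharp from Example #8 (both assumed to live in the same utility class); note that the Imdecode path re-encodes the pixels via ToBytes(), which defaults to PNG, so the conversion is lossless but not zero-copy.

            // Hypothetical round trip: OpenCvSharp -> Emgu -> OpenCvSharp.
            using (var source = OpenCvSharp.Cv2.ImRead("input.png")) // assumed sample image
            {
                Emgu.CV.Mat emguMat = MatOpenCVSharpToEmgu(source);
                OpenCvSharp.Mat back = MatEmguToOpenCVSharp(emguMat); // wraps the Emgu data via its native pointer
                Console.WriteLine("{0}x{1} -> {2}x{3}", source.Rows, source.Cols, back.Rows, back.Cols);
            }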
Example #19
        static void CreateDescriptions(string dirname, string message)
        {
            Console.Write("Creating descriptions for the {0:s}...", message);
            using (var stream = new MemoryStream()) {
                using (var archive = new ZipArchive(stream, ZipArchiveMode.Create, true)) {
                    foreach (var filename in Directory.GetFiles(dirname, "*.png"))
                    {
                        var heroname = Path.GetFileNameWithoutExtension(filename);
                        using (var portrait = new Emgu.CV.Image <Emgu.CV.Structure.Bgra, byte>(filename))
                            using (var kp = new Emgu.CV.Util.VectorOfKeyPoint())
                                using (var des = new Emgu.CV.Mat()) {
                                    sift.DetectAndCompute(portrait, null, kp, des, false);

                                    var file = archive.CreateEntry(heroname, CompressionLevel.Optimal);
                                    using (var fstream = file.Open())
                                        using (var writer = new BinaryWriter(fstream)) {
                                            var bytes = new byte[des.Cols * des.Rows * des.ElementSize];
                                            unsafe
                                            {
                                                fixed(byte *ptr = bytes)
                                                {
                                                    Buffer.MemoryCopy(des.DataPointer.ToPointer(), ptr, bytes.Length, bytes.Length);
                                                }
                                            }
                                            writer.Write(bytes);
                                        }
                                }
                    }
                }

                using (var fstream = new FileStream(dirname + ".zip", FileMode.Create)) {
                    stream.Seek(0, SeekOrigin.Begin);
                    stream.CopyTo(fstream);
                }
            }
            Console.WriteLine("Done.");
        }
Example #20
 public void LoadImgByEmguCvMat(Emgu.CV.Mat frame)
 {
     using System.Drawing.Bitmap bitmap = frame.ToBitmap();
     System.Drawing.Imaging.BitmapData data = bitmap.LockBits(
         new System.Drawing.Rectangle(0, 0, bitmap.Width, bitmap.Height),
         System.Drawing.Imaging.ImageLockMode.ReadOnly,
         System.Drawing.Imaging.PixelFormat.Format32bppArgb);
     Dispatcher.Invoke(() =>
     {
         if (_writeBitmap == null)
         {
             _writeBitmap = new WriteableBitmap(frame.Width, frame.Height, 96, 96, PixelFormats.Bgra32, null);
             pImg.Source  = _writeBitmap;
             _rect        = new Int32Rect(0, 0, bitmap.Width, bitmap.Height);
         }
         _writeBitmap.Lock();
         _writeBitmap.WritePixels(_rect, data.Scan0, (4 * data.Width * data.Height), data.Stride);
         //Marshal.Copy(data.Scan0,_writeBitmap.BackBuffer,0,1);
         //_writeBitmap.AddDirtyRect(rec);
         _writeBitmap.Unlock();
     });
     bitmap.UnlockBits(data);
     bitmap.Dispose();
 }
Example #21
 public unsafe static extern int RecognizeImgCpp(
     out CarDataStruct answer,
     Emgu.CV.Mat img,
     string fileName,
     int mode);
Example #22
        private void Calibrate()
        {
            if (m_skeletonCalibPoints.Count == m_calibPoints.Count
                // We need at least four points to map a rectangular region.
                && m_skeletonCalibPoints.Count == 4)
            {
                //skeleton 3D positions --> 3D positions in depth camera
                Point3D p0 = ConvertSkeletonPointToDepthPoint(m_kinectSensor, m_skeletonCalibPoints[0]);
                Point3D p1 = ConvertSkeletonPointToDepthPoint(m_kinectSensor, m_skeletonCalibPoints[1]);
                Point3D p2 = ConvertSkeletonPointToDepthPoint(m_kinectSensor, m_skeletonCalibPoints[2]);
                Point3D p3 = ConvertSkeletonPointToDepthPoint(m_kinectSensor, m_skeletonCalibPoints[3]);

                //3d positions depth camera --> positions on a 2D plane
                Vector3D v1 = p1 - p0;
                v1.Normalize();

                Vector3D v2 = p2 - p0;
                v2.Normalize();

                Vector3D planeNormalVec = Vector3D.CrossProduct(v1, v2);
                planeNormalVec.Normalize();

                Vector3D resultingPlaneNormal = new Vector3D(0, 0, 1);
                m_groundPlaneTransform = Util.Make_align_axis_matrix(resultingPlaneNormal, planeNormalVec);

                Point3D p0OnPlane = m_groundPlaneTransform.Transform(p0);
                Point3D p1OnPlane = m_groundPlaneTransform.Transform(p1);
                Point3D p2OnPlane = m_groundPlaneTransform.Transform(p2);
                Point3D p3OnPlane = m_groundPlaneTransform.Transform(p3);

                //2d plane positions --> exact 2d square on screen (using perspective transform)
                System.Drawing.PointF[] src = new System.Drawing.PointF[4];
                src[0] = new System.Drawing.PointF((float)p0OnPlane.X, (float)p0OnPlane.Y);
                src[1] = new System.Drawing.PointF((float)p1OnPlane.X, (float)p1OnPlane.Y);
                src[2] = new System.Drawing.PointF((float)p2OnPlane.X, (float)p2OnPlane.Y);
                src[3] = new System.Drawing.PointF((float)p3OnPlane.X, (float)p3OnPlane.Y);

                System.Drawing.PointF[] dest = new System.Drawing.PointF[4];
                dest[0] = new System.Drawing.PointF((float)m_calibPoints[0].X, (float)m_calibPoints[0].Y);
                dest[1] = new System.Drawing.PointF((float)m_calibPoints[1].X, (float)m_calibPoints[1].Y);
                dest[2] = new System.Drawing.PointF((float)m_calibPoints[2].X, (float)m_calibPoints[2].Y);
                dest[3] = new System.Drawing.PointF((float)m_calibPoints[3].X, (float)m_calibPoints[3].Y);

                Emgu.CV.Mat transform = Emgu.CV.CvInvoke.GetPerspectiveTransform(src, dest);

                m_transform = new Emgu.CV.Matrix <double>(transform.Rows, transform.Cols, transform.NumberOfChannels);
                transform.CopyTo(m_transform);

                m_calibrationStatus = CalibrationStep.Calibrated;

                //test to see if resulting perspective transform is correct
                //tResultx should be same as points in m_calibPoints
                //Point tResult0 = KinectToProjectionPoint(m_skeletonCalibPoints[0]);
                //Point tResult1 = KinectToProjectionPoint(m_skeletonCalibPoints[1]);
                //Point tResult2 = KinectToProjectionPoint(m_skeletonCalibPoints[2]);
                //Point tResult3 = KinectToProjectionPoint(m_skeletonCalibPoints[3]);

                //Debug.Assert(tResult0.Equals(m_calibPoints[0]));
                //Debug.Assert(tResult1.Equals(m_calibPoints[1]));
                //Debug.Assert(tResult2.Equals(m_calibPoints[2]));
                //Debug.Assert(tResult3.Equals(m_calibPoints[3]));
            }
        }
Example #23
        public async void start()
        {
            if (didStart)
            {
                return;
            }

            if (this.CarDidEnter == null)
            {
                CarDidEnter = CarDidEnterDefault;
            }
            if (this.CarDidLeave == null)
            {
                CarDidLeave = CarDidLeaveDefault;
            }
            if (this.CarProcessingDone == null)
            {
                CarProcessingDone = CarProcessingDoneDefault;
            }

            //flag to note the process started, to prevent changing parameters that would break the processing
            didStart = true;

            //sets up the image matrices for the raw input and processed output
            Emgu.CV.Mat iMatrix = new Emgu.CV.Mat();
            Emgu.CV.Mat oMatrix = new Emgu.CV.Mat();

            await Task.Run(() =>
            {
                //creates a window if desired, window for testing purposes
                if (showWindow)
                {
                    Emgu.CV.CvInvoke.NamedWindow("Car Detection Test", Emgu.CV.CvEnum.NamedWindowType.FreeRatio);
                }


                //This async task continually pulls the video frame to be processed.
                Task.Run(() =>
                {
                    for (; ;)
                    {
                        try
                        {
                            if (didStop)
                            {
                                return;
                            }
                            vCapture.Read(iMatrix);
                            if (iMatrix == null)
                            {
                                return;
                            }
                            if (iMatrix.IsEmpty)
                            {
                                iMatrix.Dispose(); iMatrix = null; return;
                            }
                            //System.Console.Out.WriteLine(iMatrix.Size.ToString());
                            // Emgu.CV.CvInvoke.WaitKey((int)(1000/(fps)));
                            //double FrameRate = vCapture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.Fps);
                            //System.Diagnostics.Debug.WriteLine(FrameRate);
                            if (fps > 0)
                            {
                                Thread.Sleep((int)(1000.0 / fps));
                            }
                        }
                        catch (Exception e) { return; }
                    }
                });

                //This async task continually process the pulled frames
                for (; ;)
                {
                    try
                    {
                        //If the matrix used to store the frames is not empty
                        if (iMatrix == null)
                        {
                            oMatrix.Dispose(); oMatrix = null; dispatcher.Invoke(CarProcessingDone, this); if (showWindow)
                            {
                                Emgu.CV.CvInvoke.DestroyAllWindows();
                            }
                            return;
                        }
                        if (!iMatrix.IsEmpty)
                        {
                            didEnter = true;

                            if (didStop)
                            {
                                oMatrix.Dispose(); oMatrix = null; dispatcher.Invoke(CarProcessingDone, this); if (showWindow)
                                {
                                    Emgu.CV.CvInvoke.DestroyAllWindows();
                                }
                                return;
                            }
                            //Converts the contents of iMatrix to grayscale and writes the result to oMatrix
                            //this is to ensure proper processing
                            Emgu.CV.CvInvoke.CvtColor(iMatrix, oMatrix, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

                            //Uses the cascade xml file provided in the initializer to draw rectangles around possible candidates.
                            Rectangle[] rects = casc.DetectMultiScale(oMatrix, 1.01, 5, new Size(700, 700), new Size(1100, 1100));

                            //removes the image from the out matrix if one exists to make room for the new one.
                            if (oMatrix == null)
                            {
                                dispatcher.Invoke(CarProcessingDone, this); if (showWindow)
                                {
                                    Emgu.CV.CvInvoke.DestroyAllWindows();
                                }
                                return;
                            }
                            if (oMatrix.IsEmpty)
                            {
                                oMatrix.Dispose(); oMatrix = null; dispatcher.Invoke(CarProcessingDone, this); if (showWindow)
                                {
                                    Emgu.CV.CvInvoke.DestroyAllWindows();
                                }
                                return;
                            }

                            for (int i = 0; i < oMatrix.Total.ToInt32(); i++)
                            {
                                oMatrix.PopBack(1);
                            }


                            //sets initial value to zero
                            int carsInFrame = 0;

                            //loops through all of the rectangles in the discovered object array
                            foreach (Rectangle rect in rects)
                            {
                                //draws the rectangles on the imatrix image for display if we wish to show the window
                                //we use imatrix as it is in color
                                if (showWindow)
                                {
                                    var x = rect.X;
                                    var y = rect.Y;
                                    var w = rect.Width;
                                    var h = rect.Height;
                                    Emgu.CV.CvInvoke.Rectangle(iMatrix, new Rectangle(x, y, w, h), new Emgu.CV.Structure.MCvScalar(50));
                                }
                                //increase the number of cars in frame for each iteration
                                carsInFrame++;
                            }

                            if (carsInFrame == numCarsInLot)
                            {
                                steady++;
                            }
                            else
                            {
                                steady = 0;
                            }

                            //update the number of cars
                            numCarsInLot = carsInFrame;

                            //if the number of cars has changed
                            //call the proper delegate the necessary number of times
                            if (carsInFrame > oldNumCarsInLot && steady > 20)
                            {
                                for (int i = 0; i < carsInFrame - oldNumCarsInLot; i++)
                                {
                                    dispatcher.Invoke(CarDidEnter, this);
                                }
                                oldNumCarsInLot = numCarsInLot;
                            }
                            if (carsInFrame < oldNumCarsInLot && steady > 20)
                            {
                                for (int i = 0; i < oldNumCarsInLot - carsInFrame; i++)
                                {
                                    dispatcher.Invoke(CarDidLeave, this);
                                }
                                oldNumCarsInLot = numCarsInLot;
                            }

                            //if the show window flag is true we push the drawn images to the window
                            if (showWindow)
                            {
                                if (iMatrix == null)
                                {
                                    oMatrix.Dispose(); dispatcher.Invoke(CarProcessingDone, this); if (showWindow)
                                    {
                                        Emgu.CV.CvInvoke.DestroyAllWindows();
                                    }
                                    return;
                                }
                                if (iMatrix.IsEmpty)
                                {
                                    oMatrix.Dispose(); dispatcher.Invoke(CarProcessingDone, this); if (showWindow)
                                    {
                                        Emgu.CV.CvInvoke.DestroyAllWindows();
                                    }
                                    return;
                                }
                                Emgu.CV.CvInvoke.Imshow("Car Detection Test", iMatrix);
                            }

                            //discard the now rendered frame
                            if (iMatrix == null)
                            {
                                oMatrix.Dispose(); oMatrix = null; dispatcher.Invoke(CarProcessingDone, this); if (showWindow)
                                {
                                    Emgu.CV.CvInvoke.DestroyAllWindows();
                                }
                                return;
                            }
                            if (iMatrix.IsEmpty)
                            {
                                oMatrix.Dispose(); oMatrix = null; dispatcher.Invoke(CarProcessingDone, this); if (showWindow)
                                {
                                    Emgu.CV.CvInvoke.DestroyAllWindows();
                                }
                                return;
                            }
                            iMatrix.PopBack(1);

                            //Destroys windows and stops the loop if the escape key is pressed
                            if (showWindow && Emgu.CV.CvInvoke.WaitKey(33) == 27)
                            {
                                if (showWindow)
                                {
                                    Emgu.CV.CvInvoke.DestroyAllWindows();
                                }
                                break;
                            }
                            if (didStop)
                            {
                                oMatrix.Dispose(); oMatrix = null; dispatcher.Invoke(CarProcessingDone, this); if (showWindow)
                                {
                                    Emgu.CV.CvInvoke.DestroyAllWindows();
                                }
                                return;
                            }
                        }
                        else if (didEnter)
                        {
                            oMatrix.Dispose(); oMatrix = null; dispatcher.Invoke(CarProcessingDone, this); if (showWindow)
                            {
                                Emgu.CV.CvInvoke.DestroyAllWindows();
                            }
                            return;
                        }
                    }
                    catch (Exception e) { oMatrix.Dispose(); oMatrix = null; dispatcher.Invoke(CarProcessingDone, this); if (showWindow)
                                          {
                                              Emgu.CV.CvInvoke.DestroyAllWindows();
                                          }
                                          return; }
                }
            });
        }
Example #24
 public Emgu.CV.Mat getROIImage(Emgu.CV.Mat inImage)
 {
     return(new Emgu.CV.Mat(inImage, getRect(inImage.Width, inImage.Height)));
 }
Example #25
        public static S_OCR_Result DoReconizeOCR(ref Emgu.CV.OCR.Tesseract f_OCR, Emgu.CV.Mat f_Mat)
        {
            Emgu.CV.Mat t_Mat = new Emgu.CV.Mat();
            if (f_Mat.NumberOfChannels != 1)
            {
                f_Mat.ConvertTo(t_Mat, Emgu.CV.CvEnum.DepthType.Cv8U);
            }
            else
            {
                t_Mat = f_Mat.Clone();
            }
            f_OCR.SetImage(t_Mat);
            f_OCR.Recognize();
            S_OCR_Result t_OCR_Result;

            t_OCR_Result.s_LineRectangle = new List <Rectangle>();
            t_OCR_Result.s_LineString    = new List <string>();
            t_OCR_Result.s_HOCR          = f_OCR.GetHOCRText();
            String t_GetUTF8Text = f_OCR.GetUTF8Text();

            String[] t_SpilitUTF8Text = t_GetUTF8Text.Split(System.Environment.NewLine.ToCharArray());
            for (int i = 0; i < t_SpilitUTF8Text.Length; i++)
            {
                t_SpilitUTF8Text[i] = t_SpilitUTF8Text[i].Trim();
                if (t_SpilitUTF8Text[i].Length > 0)
                {
                    t_OCR_Result.s_LineString.Add(t_SpilitUTF8Text[i]);
                }
            }
            Emgu.CV.OCR.Tesseract.Character[] t_Characters = f_OCR.GetCharacters();

            int       t_X = 0, t_Y = 0, t_Width = 0, t_Height = 0;
            bool      t_IsFirstWord = true;
            string    t_Text;
            Rectangle t_Region;

            for (int i = 0; i < t_Characters.Length; i++)
            {
                Emgu.CV.OCR.Tesseract.Character t_Character = t_Characters[i];
                t_Text   = t_Character.Text;
                t_Region = t_Character.Region;
                if (t_Character.Text == " ")
                {
                    continue;
                }
                //Emgu.CV.OCR.Tesseract.Character t_LastCharacter = t_Characters[i];
                if (t_IsFirstWord == true)
                {
                    t_IsFirstWord = false;
                    t_X           = t_Character.Region.X;
                    t_Y           = t_Character.Region.Y;
                }
                if (t_Character.Text == System.Environment.NewLine.ToString())
                {
                    t_IsFirstWord = true;
                    t_X           = t_Character.Region.X;
                    t_Y           = t_Character.Region.Y;
                    t_OCR_Result.s_LineRectangle.Add(new Rectangle(t_X, t_Y, t_Width, t_Height));
                }
            }



            return(t_OCR_Result);
        }
Example #26
 public static void ShowImage(Emgu.CV.Mat mat, String windowName = "EmguCVMat")
 {
     Emgu.CV.CvInvoke.Imshow(windowName, mat);
 }
Example #27
 public void LoadTemplate()
 {
     TemplateImage = Emgu.CV.CvInvoke.Imread(templatePath + ".jpg", Emgu.CV.CvEnum.LoadImageType.Color);
     TemplateMask  = Emgu.CV.CvInvoke.Imread(templatePath + "_mask.jpg", Emgu.CV.CvEnum.LoadImageType.Color);
 }
Example #28
 public static void LoadMatFromFile(String fileName, ref Emgu.CV.Mat mat)
 {
     mat = new Emgu.CV.Mat(fileName, Emgu.CV.CvEnum.ImreadModes.AnyColor);
 }
Example #29
        private void Worker_DoWork(object sender, System.ComponentModel.DoWorkEventArgs e)
        {
            Image image   = (Image)e.Argument;
            var   minSize = new Size(3840, 2160);
            Size  newSize;

            if (image.Width < minSize.Width || image.Height < minSize.Height)
            {
                var ratio = Math.Max((double)minSize.Width / image.Width, (double)minSize.Height / image.Height);
                newSize = new Size((int)(ratio * image.Width), (int)(ratio * image.Height));
            }
            else
            {
                newSize = image.Size;
            }
            var newRect = new Rectangle(Point.Empty, newSize);

            Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> cvImage;
            using (var bitmap = new Bitmap(newSize.Width, newSize.Height, PixelFormat.Format24bppRgb)) {
                using (var graphics = Graphics.FromImage(bitmap)) {
                    graphics.CompositingQuality = CompositingQuality.HighQuality;
                    graphics.InterpolationMode  = InterpolationMode.HighQualityBicubic;
                    graphics.SmoothingMode      = SmoothingMode.HighQuality;
                    graphics.PixelOffsetMode    = PixelOffsetMode.HighQuality;

                    using (var wrapMode = new ImageAttributes()) {
                        wrapMode.SetWrapMode(WrapMode.TileFlipXY);
                        graphics.DrawImage(image, newRect, 0, 0, image.Width, image.Height, GraphicsUnit.Pixel, wrapMode);
                    }
                }

                Invoke(new Action(() => {
                    if (screenshotViewer == null)
                    {
                        screenshotViewer = new ScreenshotViewer(this)
                        {
                            Top = Top, Left = Right
                        };
                    }
                    screenshotViewer.SetImage(new Bitmap(bitmap));
                    screenshotViewer.Show();
                }));

                var data   = bitmap.LockBits(newRect, ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);
                var nBytes = data.Stride * data.Height;
                cvImage = new Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte>(newSize);
                unsafe {
                    Buffer.MemoryCopy(data.Scan0.ToPointer(), cvImage.Mat.DataPointer.ToPointer(), nBytes, nBytes);
                }
                bitmap.UnlockBits(data);
            }

            if (sift == null)
            {
                sift = new Emgu.CV.Features2D.SIFT(edgeThreshold: 25, sigma: 1.2);
            }
            if (matcher == null)
            {
                var use_bf = true;
                if (use_bf)
                {
                    matcher = new Emgu.CV.Features2D.BFMatcher(Emgu.CV.Features2D.DistanceType.L2);
                }
                else
                {
                    matcher = new Emgu.CV.Features2D.FlannBasedMatcher(new Emgu.CV.Flann.KdTreeIndexParams(5), new Emgu.CV.Flann.SearchParams());
                }
            }

            if (heroDescriptors == null)
            {
                Invoke(new Action(() => {
                    screenshotViewer.SetProgress(Stage.LoadingData);
                }));
                heroDescriptors   = loadDescriptors("portraits.zip");
                bgnameDescriptors = loadDescriptors("bgnames.zip");
            }

            int nTotal   = heroDescriptors.Count + bgnameDescriptors.Count;
            int nCurrent = 0;

            using (var kp = new Emgu.CV.Util.VectorOfKeyPoint())
                using (var des = new Emgu.CV.Mat()) {
                    Invoke(new Action(() => {
                        screenshotViewer.SetProgress(Stage.ProcessingImage);
                    }));
                    sift.DetectAndCompute(cvImage, null, kp, des, false);
                    cvImage.Dispose();

                    var searchResults = new List <SearchResult>();
                    Invoke(new Action(() => {
                        screenshotViewer.SetProgress(0.0);
                    }));
                    foreach (var kvp in heroDescriptors)
                    {
                        using (var vMatches = new Emgu.CV.Util.VectorOfVectorOfDMatch()) {
                            matcher.KnnMatch(kvp.Value, des, vMatches, 2);
                            const float maxdist = 0.7f;
                            var         matches = vMatches.ToArrayOfArray().Where(m => m[0].Distance < maxdist * m[1].Distance).ToList();
                            if (matches.Any())
                            {
                                searchResults.Add(new SearchResult(kvp.Key, matches, kp));
                            }
                        }
                        nCurrent++;
                        Invoke(new Action(() => {
                            screenshotViewer.SetProgress((double)nCurrent / nTotal);
                        }));
                    }
                    searchResults.Sort((a, b) => - a.Distance.CompareTo(b.Distance));
                    searchResults.RemoveAll(t => searchResults.Take(searchResults.IndexOf(t)).Select(u => u.Name).Contains(t.Name));
                    var bans_picks = searchResults.Take(16).OrderBy(t => t.Location.Y).ToList();
                    var bans       = bans_picks.Take(6).OrderBy(t => t.Location.X).ToList();
                    var picks      = bans_picks.Skip(6).OrderBy(t => t.Location.X).ToList();
                    var t1picks    = picks.Take(5).OrderBy(t => t.Location.Y).ToList();
                    var t2picks    = picks.Skip(5).OrderBy(t => t.Location.Y).ToList();

                    var bgSearchResults = new List <SearchResult>();
                    foreach (var kvp in bgnameDescriptors)
                    {
                        using (var vMatches = new Emgu.CV.Util.VectorOfVectorOfDMatch()) {
                            matcher.KnnMatch(kvp.Value, des, vMatches, 2);
                            const float maxdist = 0.7f;
                            var         matches = vMatches.ToArrayOfArray().Where(m => m[0].Distance < maxdist * m[1].Distance).ToList();
                            if (matches.Any())
                            {
                                bgSearchResults.Add(new SearchResult(kvp.Key, matches, kp));
                            }
                        }
                        nCurrent++;
                        Invoke(new Action(() => {
                            screenshotViewer.SetProgress((double)nCurrent / nTotal);
                        }));
                    }
                    var bgSearchResult = bgSearchResults.OrderBy(t => - t.Distance).First();
                    Invoke(new Action(() => {
                        screenshotViewer.SetProgress(Stage.Complete);
                        screenshotViewer.SetSearchResults(bans_picks.ToArray(), bgSearchResult);
                        c_bg.Text = bgSearchResult.Name;
                        screenshotViewer.Show();
                        Focus();
                    }));
                }
        }
Example #30
        private static void GrayValueTask()
        {
            while (true)
            {
                while (m_CheckList.Count > 0)
                {
                    DataGridViewRow t_Checks = m_CheckList[0];
                    lock (m_Mutex)
                    {
                        string   t_FolderPath = t_Checks.Cells["FolderPath"].Value.ToString();
                        string[] t_ImageFiles = System.IO.Directory.GetFiles(t_FolderPath);
                        foreach (string t_ImageFile in t_ImageFiles)
                        {
                            string[] t_ImageFileSplit = System.IO.Path.GetFileNameWithoutExtension(t_ImageFile).Split('_');
                            string   t_StationNumber  = t_ImageFileSplit[2];
                            if (t_ImageFileSplit[2].CompareTo("15") == 0)
                            {
                                t_StationNumber = t_ImageFileSplit[2] + "_" + t_ImageFileSplit[3];
                            }
                            string  t_StationName = m_ImageNameMappings[t_StationNumber];
                            Point[] t_ROI         = m_FixROILocations[t_StationName];

                            System.Drawing.Bitmap t_Bitmap = new Bitmap(t_ImageFile);

                            Emgu.CV.Mat t_Mat = Emgu.CV.CvInvoke.Imread(t_ImageFile, Emgu.CV.CvEnum.ImreadModes.AnyColor);
                            //Emgu.CV.CvInvoke.NamedWindow("A", Emgu.CV.CvEnum.NamedWindowType.FreeRatio);
                            double t_Average = 0.0;
                            if (t_Bitmap.PixelFormat == System.Drawing.Imaging.PixelFormat.Format8bppIndexed)
                            {
                                Emgu.CV.Image <Emgu.CV.Structure.Gray, byte> t_Image = new Emgu.CV.Image <Emgu.CV.Structure.Gray, byte>(t_Mat.Bitmap);
                                t_Image.ROI = new Rectangle(t_ROI[0].X, t_ROI[0].Y, t_ROI[1].X - t_ROI[0].X, t_ROI[1].Y - t_ROI[0].Y);
                                Emgu.CV.Structure.Gray t_AverageBGR = t_Image.GetAverage();
                                t_Average = t_AverageBGR.MCvScalar.V0;
                                t_Image.Dispose();
                                t_Image = null;
                            }
                            else
                            {
                                Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> t_Image = new Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte>(t_Mat.Bitmap);
                                t_Image.ROI = new Rectangle(t_ROI[0].X, t_ROI[0].Y, t_ROI[1].X - t_ROI[0].X, t_ROI[1].Y - t_ROI[0].Y);
                                Emgu.CV.Structure.Bgr t_AverageBGR = t_Image.GetAverage();
                                t_Average = t_AverageBGR.MCvScalar.V2;
                                t_Image.Dispose();
                                t_Image = null;
                            }
                            t_Checks.Cells[t_StationName].Value = t_Average;
                            t_Mat.Dispose();
                            t_Mat = null;
                            t_Bitmap.Dispose();
                            t_Bitmap = null;
                            GC.Collect();
                        }
                    }
                    string t_ResultsString = string.Empty;
                    foreach (DataGridViewCell t_Check in t_Checks.Cells)
                    {
                        t_ResultsString += t_Check.Value + ",";
                    }
                    t_ResultsString = t_ResultsString.Remove(t_ResultsString.Length - 1, 1);
                    System.IO.File.AppendAllText("Results.csv", t_ResultsString);
                    System.IO.File.AppendAllText("Results.csv", System.Environment.NewLine);

                    m_CheckList.RemoveAt(0);
                }
            }
        }