Example #1
        private void InitFaceDetector()
        {
            if (_faceDetector == null)
            {
                InitPath();
                String ssdFileLocal = DnnPage.DnnDownloadFile(
                    "https://github.com/opencv/opencv_3rdparty/raw/dnn_samples_face_detector_20170830/",
                    "res10_300x300_ssd_iter_140000.caffemodel",
                    _path);

                String ssdProtoFileLocal = DnnPage.DnnDownloadFile(
                    "https://raw.githubusercontent.com/opencv/opencv/4.0.1/samples/dnn/face_detector/",
                    "deploy.prototxt",
                    _path);
                _faceDetector = DnnInvoke.ReadNetFromCaffe(ssdProtoFileLocal, ssdFileLocal);
            }
        }
        private void loadToolStripMenuItem_Click(object sender, EventArgs e)
        {
            //https://github.com/onnx

            try
            {
                OpenFileDialog dialog = new OpenFileDialog();
                dialog.Filter = "ONNX model (*.onnx)|*.onnx";
                if (dialog.ShowDialog() == DialogResult.OK)
                {
                    model = DnnInvoke.ReadNetFromONNX(dialog.FileName);
                    MessageBox.Show("Model loaded.");
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
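Once loaded, the ONNX net is run like any other Emgu CV DNN model. A minimal inference sketch follows; the 224x224 input size, scaling and channel order are assumptions and must match whatever the particular .onnx file was trained with:

        private Mat RunOnnxModel(Mat image)
        {
            // input geometry and normalization below are assumed; adjust per model
            using (Mat blob = DnnInvoke.BlobFromImage(
                       image,
                       1.0 / 255,
                       new Size(224, 224),
                       new MCvScalar(0, 0, 0),
                       swapRB: true,
                       crop: false))
            {
                model.SetInput(blob);
                return model.Forward(); // raw output; interpretation is model specific
            }
        }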
Example #3
        public Mat GetFaceEmbeddingSecond(Mat image)
        {
            var(fH, fW) = (image.Size.Height, image.Size.Width);
            if (fH < 300 || fW < 300)
            {
                return(null);
            }

            var face300 = new Mat();

            CvInvoke.ResizeForFrame(image, face300, new Size(300, 300));
            var faceBlob = DnnInvoke.BlobFromImage(face300, 1.0 / 255, new Size(300, 300), new MCvScalar(0, 0, 0),
                                                   swapRB: true, crop: false);

            embedderSecond.SetInput(faceBlob);
            var vec = embedderSecond.Forward().Clone();

            return(vec.Reshape(1));
        }
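The Mat returned above is a single-row feature vector, so two faces can be compared by cosine similarity. A sketch, assuming both embeddings come from GetFaceEmbeddingSecond; the 0.5 match threshold is an assumption to be tuned for the embedding model in use:

        public bool IsSameFace(Mat embeddingA, Mat embeddingB)
        {
            double dot        = embeddingA.Dot(embeddingB);
            double norms      = CvInvoke.Norm(embeddingA) * CvInvoke.Norm(embeddingB);
            double similarity = dot / norms;
            return similarity > 0.5; // assumed threshold
        }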
Example #4
        /// <summary>
        /// Receive an image from camera.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void Camera_ImageGrabbed(object sender, EventArgs e)
        {
            camera.Retrieve(frame);

            //CvInvoke.Flip(frame, frame, Emgu.CV.CvEnum.FlipType.Horizontal);
            Mat blobs = DnnInvoke.BlobFromImage(frame, 1.0, new System.Drawing.Size(detectionSize, detectionSize));

            net.SetInput(blobs);
            Mat detections = net.Forward();

            float[,,,] detectionsArrayInFloats = detections.GetData() as float[, , , ];

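            // SSD "detection_out" layout: [1, 1, N, 7]; each of the N rows is
            // (imageId, classId, confidence, x1, y1, x2, y2) with coordinates normalized to 0..1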
            for (int i = 0; i < detectionsArrayInFloats.GetLength(2); i++)
            {
                if (Convert.ToSingle(detectionsArrayInFloats[0, 0, i, 2], CultureInfo.InvariantCulture) > 0.4)
                {
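                    // box values are normalized, and xRate = resolutionX / detectionSize,
                    // so value * detectionSize * xRate maps straight to frame pixels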
                    float Xstart = Convert.ToSingle(detectionsArrayInFloats[0, 0, i, 3],
                                                    CultureInfo.InvariantCulture) * detectionSize * xRate;
                    float Ystart = Convert.ToSingle(detectionsArrayInFloats[0, 0, i, 4],
                                                    CultureInfo.InvariantCulture) * detectionSize * yRate;
                    float Xend = Convert.ToSingle(detectionsArrayInFloats[0, 0, i, 5],
                                                  CultureInfo.InvariantCulture) * detectionSize * xRate;
                    float Yend = Convert.ToSingle(detectionsArrayInFloats[0, 0, i, 6],
                                                  CultureInfo.InvariantCulture) * detectionSize * yRate;

                    System.Drawing.Rectangle rect = new System.Drawing.Rectangle
                    {
                        X      = (int)Xstart,
                        Y      = (int)Ystart,
                        Height = (int)(Yend - Ystart),
                        Width  = (int)(Xend - Xstart)
                    };

                    frame.Draw(rect, new Bgr(0, 255, 0), 2);
                }
            }

            Dispatcher.Invoke(new Action(() =>
            {
                img.Source = frame.Bitmap.BitmapToBitmapSource();
            }));
        }
Example #5
        public Form1()
        {
            InitializeComponent();
            try
            {
                eyes_detect = new CascadeClassifier(Path.GetFullPath("Models/haarcascade_eye.xml"));

                xRate = resolutionX / (float)detectionSize;
                yRate = resolutionY / (float)detectionSize;
                net   = DnnInvoke.ReadNetFromCaffe(protoPath, caffemodelPath);

                this.Width  = resolutionX;
                this.Height = resolutionY;
                camera      = new VideoCapture(cameraIndex);
                camera.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameWidth, resolutionX);
                camera.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameHeight, resolutionY);
                //camera.FlipHorizontal = true;
            }
            catch (NullReferenceException)
            {
                throw;
            }
        }
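Form1 relies on fields declared elsewhere in the original project. A plausible set of declarations for reference; every value here is an assumption, except that detectionSize matches the 300x300 SSD face detector used throughout these examples:

        // assumed field declarations (the original listing defines these elsewhere)
        private CascadeClassifier eyes_detect;
        private VideoCapture camera;
        private Net net;
        private float xRate, yRate;
        private string text = "";
        private const int detectionSize = 300;                   // assumed SSD input size
        private const int resolutionX = 640, resolutionY = 480;  // assumed camera resolution
        private const int cameraIndex = 0;                       // assumed default camera
        private readonly string protoPath      = "Models/deploy.prototxt";                           // assumed path
        private readonly string caffemodelPath = "Models/res10_300x300_ssd_iter_140000.caffemodel";  // assumed path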
Example #6
        /// <summary>
        /// Perform detection on the input image and return the results
        /// </summary>
        /// <param name="m">The input image</param>
        /// <param name="matchScoreThreshold">A threshold used to filter boxes by score.</param>
        /// <param name="nmsThreshold">A threshold used in non maximum suppression.</param>
        /// <returns>The detected objects</returns>
        public MaskedObject[] Detect(IInputArray m, float matchScoreThreshold = 0.5f, float nmsThreshold = 0.4f)
        {
            using (InputArray iaM = m.GetInputArray())
                using (Mat blob = DnnInvoke.BlobFromImage(m))
                    using (VectorOfMat tensors = new VectorOfMat())
                    {
                        _maskRcnnDetector.SetInput(blob, "image_tensor");
                        _maskRcnnDetector.Forward(tensors, new string[] { "detection_out_final", "detection_masks" });

                        using (Mat boxes = tensors[0])
                            using (Mat masks = tensors[1])
                            {
                                System.Drawing.Size imgSize = iaM.GetSize();
                                float[,,,] boxesData = boxes.GetData(true) as float[, , , ];
                                int numDetections = boxesData.GetLength(2);

                                List <int>       classIds = new List <int>();
                                List <Rectangle> regions  = new List <Rectangle>();
                                List <float>     scores   = new List <float>();

                                for (int i = 0; i < numDetections; i++)
                                {
                                    int classId = (int)boxesData[0, 0, i, 1];

                                    if (_objectsOfInterest == null || _objectsOfInterest.Contains(_labels[classId]))
                                    {
                                        float     score = boxesData[0, 0, i, 2];
                                        Rectangle rect  = DetectedObject.GetRectangle(
                                            boxesData[0, 0, i, 3],
                                            boxesData[0, 0, i, 4],
                                            boxesData[0, 0, i, 5],
                                            boxesData[0, 0, i, 6],
                                            imgSize.Width,
                                            imgSize.Height);
                                        rect.Intersect(new Rectangle(Point.Empty, imgSize));

                                        regions.Add(rect);
                                        scores.Add(score);
                                        classIds.Add(classId);
                                    }
                                }
                                int[] validIdx = DnnInvoke.NMSBoxes(regions.ToArray(), scores.ToArray(), matchScoreThreshold, nmsThreshold);
                                List <MaskedObject> maskedObjects = new List <MaskedObject>();

                                for (int i = 0; i < validIdx.Length; i++)
                                {
                                    int       idx     = validIdx[i];
                                    int       classId = classIds[idx];
                                    Rectangle rect    = regions[idx];
                                    float     score   = scores[idx];

                                    int[] masksDim = masks.SizeOfDimension;
                                    using (Mat mask = new Mat(
                                               masksDim[2],
                                               masksDim[3],
                                               DepthType.Cv32F,
                                               1,
                                               masks.GetDataPointer(i, classId),
                                               masksDim[3] * masks.ElementSize))
                                    {
                                        MaskedObject mo = new MaskedObject(classId, _labels[classId], score, rect, mask);
                                        maskedObjects.Add(mo);
                                    }
                                }

                                return(maskedObjects.ToArray());
                            }
                    }
        }
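A sketch of a typical call site. It assumes MaskedObject exposes the same Label/Confident/Region members that DetectedObject does in Example #16 below:

        public void PrintDetections(Mat image)
        {
            foreach (MaskedObject obj in Detect(image))
            {
                Console.WriteLine("{0} ({1:P0}) at {2}", obj.Label, obj.Confident, obj.Region);
            }
        }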
Example #7
        /// <summary>
        /// Initiate the DNN model. If needed, it will download the model from internet.
        /// </summary>
        /// <returns>null if successful. Otherwise, it will return the error message</returns>
        private String InitDetector()
        {
            if (_maskRcnnDetector == null)
            {
                InitPath();

                String url =
                    "https://github.com/emgucv/models/raw/master/mask_rcnn_inception_v2_coco_2018_01_28/";

                String graphFile = "frozen_inference_graph.pb";
                try
                {
                    graphFile = DnnDownloadFile(url, graphFile, _path);
                }
                catch (Exception e)
                {
                    return(String.Format("Failed to download the file {0} from {1}: {2}", graphFile, url, e.Message));
                }


                String lookupFile = "coco-labels-paper.txt";
                try
                {
                    lookupFile = DnnDownloadFile(url, lookupFile, _path);
                }
                catch (Exception e)
                {
                    return(String.Format("Failed to download the file {0} from {1}: {2}", lookupFile, url, e.Message));
                }

                String url2       = "https://github.com/opencv/opencv_extra/raw/4.1.0/testdata/dnn/";
                String configFile = "mask_rcnn_inception_v2_coco_2018_01_28.pbtxt";
                try
                {
                    configFile = DnnDownloadFile(
                        url2,
                        configFile,
                        _path);
                }
                catch (Exception e)
                {
                    return(String.Format("Failed to download the file {0} from {1}: {2}", configFile, url2, e.Message));
                }

                _maskRcnnDetector = Emgu.CV.Dnn.DnnInvoke.ReadNetFromTensorflow(graphFile, configFile);


                //prefer cuda backend if available
                foreach (BackendTargetPair p in DnnInvoke.GetAvailableBackends())
                {
                    if (p.Backend == Dnn.Backend.Cuda && p.Target == Target.Cuda)
                    {
                        _maskRcnnDetector.SetPreferableBackend(Dnn.Backend.Cuda);
                        _maskRcnnDetector.SetPreferableTarget(Target.Cuda);
                        break;
                    }
                }

                //_maskRcnnDetector.SetPreferableBackend(Dnn.Backend.OpenCV);
                //_maskRcnnDetector.SetPreferableTarget(Dnn.Target.Cpu);

                _labels = File.ReadAllLines(lookupFile);
                _colors = new MCvScalar[_labels.Length];
                Random r = new Random(12345);
                for (int i = 0; i < _colors.Length; i++)
                {
                    _colors[i] = new MCvScalar(r.Next(256), r.Next(256), r.Next(256));
                }
            }

            return(null);
        }
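Because InitDetector() returns null on success, a call site (sketch) checks the message before running detection:

            // sketch of a call site honoring the null-on-success convention
            String initError = InitDetector();
            if (initError != null)
            {
                SetMessage(initError); // SetMessage as used elsewhere in these pages
                return;
            }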
Example #8
        public AboutPage()
        {
            String openclTxt = String.Format("Has OpenCL: {0}", CvInvoke.HaveOpenCL);

            String lineBreak = "<br/>";

            if (CvInvoke.HaveOpenCL)
            {
                openclTxt = String.Format("{0}{1}Use OpenCL: {2}{1}<textarea rows=\"5\">{3}</textarea>{1}",
                                          openclTxt, lineBreak,
                                          CvInvoke.UseOpenCL,
                                          CvInvoke.OclGetPlatformsSummary());
            }


            var           dnnBackends     = DnnInvoke.GetAvailableBackends();
            List <String> dnnBackendsText = new List <string>();

            foreach (var dnnBackend in dnnBackends)
            {
                dnnBackendsText.Add(String.Format("{0} - {1}", dnnBackend.Backend, dnnBackend.Target));
            }

            String dnnText = String.Join(";", dnnBackendsText.ToArray());


            String osDescription = Emgu.Util.Platform.OperationSystem.ToString();

            Content =
                new WebView()
            {
                WidthRequest  = 1000,
                HeightRequest = 1000,
                Source        = new HtmlWebViewSource()
                {
                    Html =
                        @"<html>
<head>
<style>body { background-color: #EEEEEE; }</style>
<style type=""text/css"">
textarea { width: 100%; margin: 0; padding: 0; border-width: 0; }
</style>
</head>
<body>
<H2> Emgu CV Examples </H2>
<a href=http://www.emgu.com>Visit our website</a> <br/><br/>
<a href=mailto:[email protected]>Email Support</a> <br/><br/>
<H4> OpenCL Info </H4>
" + openclTxt + @"
<H4> OS: </H4>
" + osDescription + @"
<H4> OS Architecture: </H4>
" + RuntimeInformation.OSArchitecture + @"
<H4> Framework Description: </H4>
" + RuntimeInformation.FrameworkDescription + @"
<H4> Process Architecture: </H4>
" + RuntimeInformation.ProcessArchitecture + @"
<H4> Dnn Backends: </H4>
" + dnnText + @"
<H4> Build Info </H4>
<textarea rows=""30"">"
                        + CvInvoke.BuildInformation + @"
</textarea>
</body>
</html>"
                }
            };
        }
Example #9
        public DnnPage()
            : base()
        {
            var button = this.GetButton();

            button.Text     = "Perform Mask-rcnn Detection";
            button.Clicked += OnButtonClicked;

            OnImagesLoaded += async(sender, image) =>
            {
                if (image == null || image[0] == null)
                {
                    return;
                }
                SetMessage("Please wait...");
                SetImage(null);

                Task <Tuple <Mat, String, long> > t = new Task <Tuple <Mat, String, long> >(
                    () =>
                {
                    String configFile = "mask_rcnn_inception_v2_coco_2018_01_28.pbtxt";
#if __ANDROID__
                    String path = System.IO.Path.Combine(Android.OS.Environment.ExternalStorageDirectory.AbsolutePath,
                                                         Android.OS.Environment.DirectoryDownloads, "dnn_data");
                    FileInfo configFileInfo = AndroidFileAsset.WritePermanantFileAsset(Android.App.Application.Context, configFile, "dnn_data", AndroidFileAsset.OverwriteMethod.AlwaysOverwrite);
                    configFile = configFileInfo.FullName;
#else
                    String path = "./dnn_data/";
#endif

                    String graphFile  = DnnDownloadFile(path, "frozen_inference_graph.pb");
                    String lookupFile = DnnDownloadFile(path, "coco-labels-paper.txt");

                    string[] labels     = File.ReadAllLines(lookupFile);
                    Emgu.CV.Dnn.Net net = Emgu.CV.Dnn.DnnInvoke.ReadNetFromTensorflow(graphFile, configFile);


                    Mat blob = DnnInvoke.BlobFromImage(image[0]);

                    net.SetInput(blob, "image_tensor");
                    using (VectorOfMat tensors = new VectorOfMat())
                    {
                        net.Forward(tensors, new string[] { "detection_out_final", "detection_masks" });
                        using (Mat boxes = tensors[0])
                            using (Mat masks = tensors[1])
                            {
                                System.Drawing.Size imgSize = image[0].Size;
                                float[,,,] boxesData        = boxes.GetData(true) as float[, , , ];
                                //float[,,,] masksData = masks.GetData(true) as float[,,,];
                                int numDetections = boxesData.GetLength(2);
                                for (int i = 0; i < numDetections; i++)
                                {
                                    float score = boxesData[0, 0, i, 2];

                                    if (score > 0.5)
                                    {
                                        int classId  = (int)boxesData[0, 0, i, 1];
                                        String label = labels[classId];

                                        float left   = boxesData[0, 0, i, 3] * imgSize.Width;
                                        float top    = boxesData[0, 0, i, 4] * imgSize.Height;
                                        float right  = boxesData[0, 0, i, 5] * imgSize.Width;
                                        float bottom = boxesData[0, 0, i, 6] * imgSize.Height;

                                        RectangleF rectF = new RectangleF(left, top, right - left, bottom - top);
                                        Rectangle rect   = Rectangle.Round(rectF);
                                        rect.Intersect(new Rectangle(Point.Empty, imgSize));
                                        CvInvoke.Rectangle(image[0], rect, new MCvScalar(0, 0, 0, 0), 1);
                                        CvInvoke.PutText(image[0], label, rect.Location, FontFace.HersheyComplex, 1.0,
                                                         new MCvScalar(0, 0, 255), 2);

                                        int[] masksDim = masks.SizeOfDimension;
                                        using (Mat mask = new Mat(
                                                   masksDim[2],
                                                   masksDim[3],
                                                   DepthType.Cv32F,
                                                   1,
                                                   //masks.DataPointer +
                                                   //(i * masksDim[1] + classId )
                                                   //* masksDim[2] * masksDim[3] * masks.ElementSize,
                                                   masks.GetDataPointer(i, classId),
                                                   masksDim[3] * masks.ElementSize))
                                            using (Mat maskLarge = new Mat())
                                                using (Mat maskLargeInv = new Mat())
                                                    using (Mat subRegion = new Mat(image[0], rect))
                                                        using (Mat largeColor = new Mat(subRegion.Size, Emgu.CV.CvEnum.DepthType.Cv8U, 3))
                                                        {
                                                            CvInvoke.Resize(mask, maskLarge, rect.Size);

                                                            //give the mask at least 30% transparency
                                                            using (ScalarArray sa = new ScalarArray(0.7))
                                                                CvInvoke.Min(sa, maskLarge, maskLarge);

                                                            //Create the inverse mask for the original image
                                                            using (ScalarArray sa = new ScalarArray(1.0))
                                                                CvInvoke.Subtract(sa, maskLarge, maskLargeInv);

                                                            //The mask color
                                                            largeColor.SetTo(new Emgu.CV.Structure.MCvScalar(255, 0, 0));
                                                            if (subRegion.NumberOfChannels == 4)
                                                            {
                                                                using (Mat bgrSubRegion = new Mat())
                                                                {
                                                                    CvInvoke.CvtColor(subRegion, bgrSubRegion, ColorConversion.Bgra2Bgr);
                                                                    CvInvoke.BlendLinear(largeColor, bgrSubRegion, maskLarge, maskLargeInv, bgrSubRegion);
                                                                    CvInvoke.CvtColor(bgrSubRegion, subRegion, ColorConversion.Bgr2Bgra);
                                                                }
                                                            }
                                                            else
                                                            {
                                                                CvInvoke.BlendLinear(largeColor, subRegion, maskLarge, maskLargeInv, subRegion);
                                                            }
                                                        }
                                    }
                                }
                            }
                    }
                    long time = 0;

                    return(new Tuple <Mat, String, long>(image[0], null, time));
                });
                t.Start();

                var result = await t;
                SetImage(t.Result.Item1);
                String computeDevice = CvInvoke.UseOpenCL ? "OpenCL: " + Ocl.Device.Default.Name : "CPU";

                SetMessage(t.Result.Item2);
            };
        }
Example #10
        /// <summary>
        /// Detect vehicle from the given image
        /// </summary>
        /// <param name="image">The image</param>
        /// <returns>The detected vehicles.</returns>
        public Vehicle[] Detect(Mat image)
        {
            int       imgDim                          = 300;
            int       vehicleAttrSize                 = 72;
            MCvScalar meanVal                         = new MCvScalar();
            double    scale                           = 1.0;
            float     vehicleConfidenceThreshold      = 0.5f;
            float     licensePlateConfidenceThreshold = 0.5f;

            //MCvScalar meanVal = new MCvScalar(127.5, 127.5, 127.5);
            //double scale = 127.5;

            Size imageSize = image.Size;

            using (Mat inputBlob = DnnInvoke.BlobFromImage(
                       image,
                       scale,
                       new Size(imgDim, imgDim),
                       meanVal,
                       false,
                       false,
                       DepthType.Cv32F))
                _vehicleLicensePlateDetector.SetInput(inputBlob, "Placeholder");

            List <Vehicle>      vehicles = new List <Vehicle>();
            List <LicensePlate> plates   = new List <LicensePlate>();

            using (Mat detection = _vehicleLicensePlateDetector.Forward("DetectionOutput_"))
            {
                float[,,,] values = detection.GetData(true) as float[, , , ];
                for (int i = 0; i < values.GetLength(2); i++)
                {
                    float      imageId      = values[0, 0, i, 0];
                    float      label        = values[0, 0, i, 1]; //if label == 1, it is a vehicle; if label == 2, it is a license plate
                    float      confident    = values[0, 0, i, 2];
                    float      xLeftBottom  = values[0, 0, i, 3] * imageSize.Width;
                    float      yLeftBottom  = values[0, 0, i, 4] * imageSize.Height;
                    float      xRightTop    = values[0, 0, i, 5] * imageSize.Width;
                    float      yRightTop    = values[0, 0, i, 6] * imageSize.Height;
                    RectangleF objectRegion = new RectangleF(
                        xLeftBottom,
                        yLeftBottom,
                        xRightTop - xLeftBottom,
                        yRightTop - yLeftBottom);
                    Rectangle region = Rectangle.Round(objectRegion);

                    if (label == 1 && confident > vehicleConfidenceThreshold)
                    {   //this is a vehicle
                        Vehicle v = new Vehicle();
                        v.Region = region;

                        #region find out the type and color of the vehicle
                        using (Mat vehicle = new Mat(image, region))
                        {
                            using (Mat vehicleBlob = DnnInvoke.BlobFromImage(
                                       vehicle,
                                       scale,
                                       new Size(vehicleAttrSize, vehicleAttrSize),
                                       meanVal,
                                       false,
                                       false,
                                       DepthType.Cv32F))
                            {
                                _vehicleAttrRecognizer.SetInput(vehicleBlob, "input");

                                using (VectorOfMat vm = new VectorOfMat(2))
                                {
                                    _vehicleAttrRecognizer.Forward(vm, new string[] { "color", "type" });
                                    using (Mat vehicleColorMat = vm[0])
                                        using (Mat vehicleTypeMat = vm[1])
                                        {
                                            float[] vehicleColorData = vehicleColorMat.GetData(false) as float[];
                                            float   maxProbColor     = vehicleColorData.Max();
                                            int     maxIdxColor      = Array.IndexOf(vehicleColorData, maxProbColor);
                                            v.Color = _colorName[maxIdxColor];
                                            float[] vehicleTypeData = vehicleTypeMat.GetData(false) as float[];
                                            float   maxProbType     = vehicleTypeData.Max();
                                            int     maxIdxType      = Array.IndexOf(vehicleTypeData, maxProbType);
                                            v.Type = _vehicleType[maxIdxType];
                                        }
                                }
                            }
                        }
                        #endregion
                        vehicles.Add(v);
                    }

                    if (label == 2 && confident > licensePlateConfidenceThreshold)
                    {   //this is a license plate
                        LicensePlate p = new LicensePlate();
                        p.Region = region;

                        #region OCR on license plate
                        using (Mat plate = new Mat(image, region))
                        {
                            using (Mat inputBlob = DnnInvoke.BlobFromImage(
                                       plate,
                                       scale,
                                       new Size(94, 24),
                                       meanVal,
                                       false,
                                       false,
                                       DepthType.Cv32F))
                                using (Mat seqInd = new Mat(
                                           new Size(1, 88),
                                           DepthType.Cv32F,
                                           1))
                                {
                                    _ocr.SetInput(inputBlob, "data");

                                    if (seqInd.Depth == DepthType.Cv32F)
                                    {
                                        float[] seqIndValues = new float[seqInd.Width * seqInd.Height];
                                        for (int j = 1; j < seqIndValues.Length; j++)
                                        {
                                            seqIndValues[j] = 1.0f;
                                        }
                                        seqIndValues[0] = 0.0f;
                                        seqInd.SetTo(seqIndValues);
                                    }
                                    _ocr.SetInput(seqInd, "seq_ind");

                                    using (Mat output = _ocr.Forward("decode"))
                                    {
                                        float[]       plateValue = output.GetData(false) as float[];
                                        StringBuilder licensePlateStringBuilder = new StringBuilder();
                                        foreach (int j in plateValue)
                                        {
                                            if (j >= 0)
                                            {
                                                licensePlateStringBuilder.Append(_plateText[j]);
                                            }
                                        }

                                        p.Text = licensePlateStringBuilder.ToString();
                                    }
                                }
                        }
                        #endregion

                        plates.Add(p);
                    }
                }

                foreach (LicensePlate p in plates)
                {
                    foreach (Vehicle v in vehicles)
                    {
                        if (v.ContainsPlate(p))
                        {
                            v.LicensePlate = p;
                            break;
                        }
                    }
                }
            }
            return(vehicles.ToArray());
        }
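A sketch of consuming the result; all member names (Region, Color, Type, LicensePlate, Text) are taken from the code above:

        public void PrintVehicles(Mat image)
        {
            foreach (Vehicle v in Detect(image))
            {
                Console.WriteLine("{0} {1} at {2}", v.Color, v.Type, v.Region);
                if (v.LicensePlate != null)
                    Console.WriteLine("   plate: {0}", v.LicensePlate.Text);
            }
        }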
Example #11
        private void timer1_Tick(object sender, EventArgs e)
        {
            using (Image <Bgr, byte> frame = camera.QueryFrame().ToImage <Bgr, byte>())
            {
                if (frame != null)
                {
                    Image <Gray, Byte> grayImage = frame.Convert <Gray, byte>();
                    var StoreEyes = eyes_detect.DetectMultiScale(grayImage);

                    CvInvoke.Flip(frame, frame, Emgu.CV.CvEnum.FlipType.Horizontal);
                    Mat blobs = DnnInvoke.BlobFromImage(frame, 1.0, new System.Drawing.Size(detectionSize, detectionSize));
                    net.SetInput(blobs);
                    Mat detections = net.Forward();

                    float[,,,] detectionsArrayInFloats = detections.GetData() as float[, , , ];

                    for (int i = 0; i < detectionsArrayInFloats.GetLength(2); i++)
                    {
                        if (Convert.ToSingle(detectionsArrayInFloats[0, 0, i, 2], CultureInfo.InvariantCulture) > 0.4)
                        {
                            float Xstart = Convert.ToSingle(detectionsArrayInFloats[0, 0, i, 3],
                                                            CultureInfo.InvariantCulture) * detectionSize * xRate;
                            float Ystart = Convert.ToSingle(detectionsArrayInFloats[0, 0, i, 4],
                                                            CultureInfo.InvariantCulture) * detectionSize * yRate;
                            float Xend = Convert.ToSingle(detectionsArrayInFloats[0, 0, i, 5],
                                                          CultureInfo.InvariantCulture) * detectionSize * xRate;
                            float Yend = Convert.ToSingle(detectionsArrayInFloats[0, 0, i, 6],
                                                          CultureInfo.InvariantCulture) * detectionSize * yRate;

                            System.Drawing.Rectangle rect = new System.Drawing.Rectangle
                            {
                                X      = (int)Xstart,
                                Y      = (int)Ystart,
                                Height = (int)(Yend - Ystart),
                                Width  = (int)(Xend - Xstart)
                            };
                            frame.Draw(rect, new Bgr(0, 255, 0), 2);
                            foreach (var hEye in StoreEyes)
                            {
                                //frame.Draw(hEye, new Bgr(0, double.MaxValue, 0), 3);
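                                // avgEyes = mean horizontal eye centre; the face counts as
                                // "straight" while it stays inside the 42%-58% band of (Xstart + Xend)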
                                var avgEyes   = StoreEyes?.Average(it => (it.Right + it.Left) / 2) ?? 0;
                                var turnLeft  = (Xstart + Xend) * 0.58;
                                var turnRight = (Xstart + Xend) * 0.42;
                                Console.WriteLine($"Xstart in Eyes = {Xstart}");
                                Console.WriteLine($"Ystart in Eyes = {Ystart}");
                                Console.WriteLine($"turnLeft = {turnLeft}");
                                Console.WriteLine($"turnRight = {turnRight}");
                                Console.WriteLine($"avgEyes = {avgEyes}");
                                if (avgEyes <= turnLeft && avgEyes >= turnRight)
                                {
                                    text = "facing straight";
                                }
                                else if (avgEyes > turnLeft)
                                {
                                    text = "turn left";
                                }
                                else if (avgEyes < turnRight)
                                {
                                    text = "turn right";
                                }
                            }
                            CvInvoke.PutText(frame, text, new Point(rect.X - 2, rect.Y - 2), Emgu.CV.CvEnum.FontFace.HersheySimplex, 1.0, new Bgr(Color.Red).MCvScalar);
                        }
                    }
                    imageBox1.Image = frame;
                }
            }
        }
Example #12
        public FaceLandmarkDetectionPage()
            : base()
        {
            var button = this.GetButton();

            button.Text     = "Perform Face Landmark Detection";
            button.Clicked += OnButtonClicked;

            OnImagesLoaded += async(sender, image) =>
            {
                if (image == null || image[0] == null)
                {
                    return;
                }
                SetMessage("Please wait...");
                SetImage(null);
                Task <Tuple <IInputArray, long> > t = new Task <Tuple <IInputArray, long> >(
                    () =>
                {
                    InitFaceDetector();
                    InitFacemark();

                    int imgDim        = 300;
                    MCvScalar meanVal = new MCvScalar(104, 177, 123);
                    Stopwatch watch   = Stopwatch.StartNew();
                    Size imageSize    = image[0].Size;
                    using (Mat inputBlob = DnnInvoke.BlobFromImage(
                               image[0],
                               1.0,
                               new Size(imgDim, imgDim),
                               meanVal,
                               false,
                               false))
                        _faceDetector.SetInput(inputBlob, "data");
                    using (Mat detection = _faceDetector.Forward("detection_out"))
                    {
                        float confidenceThreshold = 0.5f;

                        List <Rectangle> faceRegions = new List <Rectangle>();

                        float[,,,] values = detection.GetData(true) as float[, , , ];
                        for (int i = 0; i < values.GetLength(2); i++)
                        {
                            float confident = values[0, 0, i, 2];

                            if (confident > confidenceThreshold)
                            {
                                float xLeftBottom       = values[0, 0, i, 3] * imageSize.Width;
                                float yLeftBottom       = values[0, 0, i, 4] * imageSize.Height;
                                float xRightTop         = values[0, 0, i, 5] * imageSize.Width;
                                float yRightTop         = values[0, 0, i, 6] * imageSize.Height;
                                RectangleF objectRegion = new RectangleF(
                                    xLeftBottom,
                                    yLeftBottom,
                                    xRightTop - xLeftBottom,
                                    yRightTop - yLeftBottom);
                                Rectangle faceRegion = Rectangle.Round(objectRegion);
                                faceRegions.Add(faceRegion);
                            }
                        }

                        using (VectorOfRect vr = new VectorOfRect(faceRegions.ToArray()))
                            using (VectorOfVectorOfPointF landmarks = new VectorOfVectorOfPointF())
                            {
                                _facemark.Fit(image[0], vr, landmarks);

                                foreach (Rectangle face in faceRegions)
                                {
                                    CvInvoke.Rectangle(image[0], face, new MCvScalar(0, 255, 0));
                                }

                                int len = landmarks.Size;
                                for (int i = 0; i < landmarks.Size; i++)
                                {
                                    using (VectorOfPointF vpf = landmarks[i])
                                        FaceInvoke.DrawFacemarks(image[0], vpf, new MCvScalar(255, 0, 0));
                                }
                            }
                        watch.Stop();
                        return(new Tuple <IInputArray, long>(image[0], watch.ElapsedMilliseconds));
                    }
                });
                t.Start();

                var result = await t;
                SetImage(t.Result.Item1);
                String computeDevice = CvInvoke.UseOpenCL ? "OpenCL: " + Ocl.Device.Default.Name : "CPU";

                SetMessage(String.Format("Detected in {0} milliseconds.", t.Result.Item2));
            };
        }
Example #13
        private void DetectAndRender(Mat image)
        {
            int       imgDim  = 300;
            MCvScalar meanVal = new MCvScalar(104, 177, 123);

            Size imageSize = image.Size;

            using (Mat inputBlob = DnnInvoke.BlobFromImage(
                       image,
                       1.0,
                       new Size(imgDim, imgDim),
                       meanVal,
                       false,
                       false))
                _faceDetector.SetInput(inputBlob, "data");
            using (Mat detection = _faceDetector.Forward("detection_out"))
            {
                float confidenceThreshold = 0.5f;

                List <Rectangle> faceRegions = new List <Rectangle>();

                float[,,,] values = detection.GetData(true) as float[, , , ];
                for (int i = 0; i < values.GetLength(2); i++)
                {
                    float confident = values[0, 0, i, 2];

                    if (confident > confidenceThreshold)
                    {
                        float      xLeftBottom  = values[0, 0, i, 3] * imageSize.Width;
                        float      yLeftBottom  = values[0, 0, i, 4] * imageSize.Height;
                        float      xRightTop    = values[0, 0, i, 5] * imageSize.Width;
                        float      yRightTop    = values[0, 0, i, 6] * imageSize.Height;
                        RectangleF objectRegion = new RectangleF(
                            xLeftBottom,
                            yLeftBottom,
                            xRightTop - xLeftBottom,
                            yRightTop - yLeftBottom);
                        Rectangle faceRegion = Rectangle.Round(objectRegion);
                        faceRegions.Add(faceRegion);
                    }
                }

                using (VectorOfRect vr = new VectorOfRect(faceRegions.ToArray()))
                    using (VectorOfVectorOfPointF landmarks = new VectorOfVectorOfPointF())
                    {
                        _facemark.Fit(image, vr, landmarks);

                        foreach (Rectangle face in faceRegions)
                        {
                            CvInvoke.Rectangle(image, face, new MCvScalar(0, 255, 0));
                        }

                        int len = landmarks.Size;
                        for (int i = 0; i < landmarks.Size; i++)
                        {
                            using (VectorOfPointF vpf = landmarks[i])
                                FaceInvoke.DrawFacemarks(image, vpf, new MCvScalar(255, 0, 0));
                        }
                    }
            }
        }
Example #14
        private void PoseEstimationBody_25_Click(object sender, EventArgs e)
        {
            try
            {
                if (!IMGDict.ContainsKey("input"))
                {
                    throw new Exception("Please read in image first.");
                }


                // for openopse
                int   inWidth   = 368;
                int   inHeight  = 368;
                float threshold = 0.1f;
                int   nPoints   = 25;

                var BODY_PARTS = new Dictionary <string, int>()
                {
                    { "Nose", 0 },
                    { "Neck", 1 },
                    { "RShoulder", 2 },
                    { "RElbow", 3 },
                    { "RWrist", 4 },
                    { "LShoulder", 5 },
                    { "LElbow", 6 },
                    { "LWrist", 7 },
                    { "MidHip", 8 },
                    { "RHip", 9 },
                    { "RKnee", 10 },
                    { "RAnkle", 11 },
                    { "LHip", 12 },
                    { "LKnee", 13 },
                    { "LAnkle", 14 },
                    { "REye", 15 },
                    { "LEye", 16 },
                    { "REar", 17 },
                    { "LEar", 18 },
                    { "LBigToe", 19 },
                    { "LSmallToe", 20 },
                    { "LHeel", 21 },
                    { "RBigToe", 22 },
                    { "RSmallToe", 23 },
                    { "RHeel", 24 },
                    { "Background", 25 }
                };

                int[,] point_pairs = new int[, ] {
                    { 1, 0 }, { 1, 2 }, { 1, 5 },
                    { 2, 3 }, { 3, 4 }, { 5, 6 },
                    { 6, 7 }, { 0, 15 }, { 15, 17 },
                    { 0, 16 }, { 16, 18 }, { 1, 8 },
                    { 8, 9 }, { 9, 10 }, { 10, 11 },
                    { 11, 22 }, { 22, 23 }, { 11, 24 },
                    { 8, 12 }, { 12, 13 }, { 13, 14 },
                    { 14, 19 }, { 19, 20 }, { 14, 21 }
                };


                // Load the caffe Model
                string prototxt  = @"F:\openpose\models\pose\body_25\pose_deploy.prototxt";
                string modelPath = @"F:\openpose\models\pose\body_25\pose_iter_584000.caffemodel";

                var net = DnnInvoke.ReadNetFromCaffe(prototxt, modelPath);

                var img       = IMGDict["input"].Clone();
                var imgHeight = img.Height;
                var imgWidth  = img.Width;

                var blob = DnnInvoke.BlobFromImage(img, 1.0 / 255.0, new Size(inWidth, inHeight), new MCvScalar(0, 0, 0));
                net.SetInput(blob);
                net.SetPreferableBackend(Emgu.CV.Dnn.Backend.OpenCV);

                var output = net.Forward();

                var H       = output.SizeOfDimension[2];
                var W       = output.SizeOfDimension[3];
                var HeatMap = output.GetData();

                var points = new List <Point>();

                for (int i = 0; i < nPoints; i++)
                {
                    Matrix <float> matrix = new Matrix <float>(H, W);
                    for (int row = 0; row < H; row++)
                    {
                        for (int col = 0; col < W; col++)
                        {
                            matrix[row, col] = (float)HeatMap.GetValue(0, i, row, col);
                        }
                    }

                    double minVal = 0, maxVal = 0;
                    Point  minLoc = default, maxLoc = default;
                    CvInvoke.MinMaxLoc(matrix, ref minVal, ref maxVal, ref minLoc, ref maxLoc);

                    // The listing is truncated at the source from this point on; what
                    // follows is a minimal, assumed reconstruction of the usual
                    // post-processing: keep the heat-map peak if it clears the
                    // threshold, mapped back to image coordinates.
                    if (maxVal > threshold)
                    {
                        points.Add(new Point(imgWidth * maxLoc.X / W, imgHeight * maxLoc.Y / H));
                    }
                    else
                    {
                        points.Add(Point.Empty);
                    }
                }

                // draw the skeleton by connecting the configured point pairs
                for (int pair = 0; pair < point_pairs.GetLength(0); pair++)
                {
                    Point partA = points[point_pairs[pair, 0]];
                    Point partB = points[point_pairs[pair, 1]];
                    if (partA != Point.Empty && partB != Point.Empty)
                    {
                        CvInvoke.Line(img, partA, partB, new MCvScalar(0, 255, 0), 3);
                        CvInvoke.Circle(img, partA, 8, new MCvScalar(0, 0, 255), -1);
                    }
                }
                // img now contains the rendered skeleton
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
Example #15
        static void Main()
        {
            //set random color
            Random rnd = new Random();

            for (int i = 0; i < 21; i++)
            {
                Colors[i] = new Rgb(rnd.Next(0, 256), rnd.Next(0, 256), rnd.Next(0, 256)).MCvScalar;
            }
            //get image and set model
            //Mat img = CvInvoke.Imread("bali-crop.jpg");
            Mat img      = CvInvoke.Imread("fish-bike.jpg");
            var blob     = DnnInvoke.BlobFromImage(img, 1, new Size(512, 512));
            var prototxt = "deploy.prototxt";
            var model    = "VGG_VOC0712Plus_SSD_512x512_ft_iter_160000.caffemodel";
            var net      = new Net();
            var import   = Importer.CreateCaffeImporter(prototxt, model);

            import.PopulateNet(net);
            net.SetInput(blob, "data");

            Stopwatch sw = new Stopwatch();

            sw.Start();
            //forward model
            var prob = net.Forward("detection_out");

            sw.Stop();
            Console.WriteLine($"Runtime:{sw.ElapsedMilliseconds} ms");

            //copy the result into a byte array, since Emgu CV can't index Mat pixels directly.
            //5600 bytes = 200 candidate detections x 7 floats x 4 bytes each
            byte[] data = new byte[5600];
            prob.CopyTo(data);

            //draw result
            for (int i = 0; i < prob.SizeOfDimension[2]; i++)
            {
                var d = BitConverter.ToSingle(data, i * 28 + 8);
                if (d > 0.4)
                {
                    var idx = (int)BitConverter.ToSingle(data, i * 28 + 4);
                    var w1  = (int)(BitConverter.ToSingle(data, i * 28 + 12) * img.Width);
                    var h1  = (int)(BitConverter.ToSingle(data, i * 28 + 16) * img.Height);
                    var w2  = (int)(BitConverter.ToSingle(data, i * 28 + 20) * img.Width);
                    var h2  = (int)(BitConverter.ToSingle(data, i * 28 + 24) * img.Height);

                    var label = $"{Labels[idx]} {d * 100:0.00}%";
                    Console.WriteLine(label);
                    CvInvoke.Rectangle(img, new Rectangle(w1, h1, w2 - w1, h2 - h1), Colors[idx], 2);
                    int baseline = 0;
                    var textSize = CvInvoke.GetTextSize(label, FontFace.HersheyTriplex, 0.5, 1, ref baseline);
                    var y        = h1 - textSize.Height < 0 ? h1 + textSize.Height : h1;
                    CvInvoke.Rectangle(img, new Rectangle(w1, y - textSize.Height, textSize.Width, textSize.Height), Colors[idx], -1);
                    CvInvoke.PutText(img, label, new Point(w1, y), FontFace.HersheyTriplex, 0.5, new Bgr(0, 0, 0).MCvScalar);
                }
            }

            //Show the image
            CvInvoke.Imshow("image", img);
            CvInvoke.WaitKey();
            CvInvoke.DestroyAllWindows();
        }
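The Importer.CreateCaffeImporter / PopulateNet pair above is the old Emgu CV 3.x loading API. On later versions the same two files load in a single call, exactly as the other examples here already do:

            // equivalent one-call load on newer Emgu CV versions
            var net = DnnInvoke.ReadNetFromCaffe(prototxt, model);
            net.SetInput(blob, "data");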
Example #16
        public DetectedObject[] Detect(Mat image, double confThreshold = 0.5)
        {
            MCvScalar meanVal = new MCvScalar();

            Size imageSize = image.Size;

            DnnInvoke.BlobFromImage(
                image,
                _inputBlob,
                1.0,
                new Size(416, 416),
                meanVal,
                true,
                false,
                DepthType.Cv8U);
            _yoloDetector.SetInput(_inputBlob, "", 0.00392);
            int[]  outLayers    = _yoloDetector.UnconnectedOutLayers;
            String outLayerType = _yoloDetector.GetLayer(outLayers[0]).Type;

            String[] outLayerNames = _yoloDetector.UnconnectedOutLayersNames;

            using (VectorOfMat outs = new VectorOfMat())
            {
                List <DetectedObject> detectedObjects = new List <DetectedObject>();
                _yoloDetector.Forward(outs, outLayerNames);

                if (outLayerType.Equals("Region"))
                {
                    int size = outs.Size;

                    for (int i = 0; i < size; i++)
                    {
                        // Network produces output blob with a shape NxC where N is a number of
                        // detected objects and C is a number of classes + 4 where the first 4
                        // numbers are [center_x, center_y, width, height]
                        using (Mat m = outs[i])
                        {
                            int rows = m.Rows;
                            int cols = m.Cols;
                            float[,] data = m.GetData(true) as float[, ];
                            for (int j = 0; j < rows; j++)
                            {
                                using (Mat subM = new Mat(m, new Emgu.CV.Structure.Range(j, j + 1), new Emgu.CV.Structure.Range(5, cols)))
                                {
                                    double minVal = 0, maxVal = 0;
                                    Point  minLoc = new Point();
                                    Point  maxLoc = new Point();
                                    CvInvoke.MinMaxLoc(subM, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
                                    if (maxVal > confThreshold)
                                    {
                                        int       centerX = (int)(data[j, 0] * imageSize.Width);
                                        int       centerY = (int)(data[j, 1] * imageSize.Height);
                                        int       width   = (int)(data[j, 2] * imageSize.Width);
                                        int       height  = (int)(data[j, 3] * imageSize.Height);
                                        int       left    = centerX - width / 2;
                                        int       top     = centerY - height / 2;
                                        Rectangle rect    = new Rectangle(left, top, width, height);

                                        DetectedObject obj = new DetectedObject();
                                        obj.ClassId   = maxLoc.X;
                                        obj.Confident = maxVal;
                                        obj.Region    = rect;
                                        obj.Label     = _labels[obj.ClassId];
                                        detectedObjects.Add(obj);
                                    }
                                }
                            }
                        }
                    }

                    return(detectedObjects.ToArray());
                }
                else
                {
                    throw new Exception(String.Format("Unknown output layer type: {0}", outLayerType));
                }
            }
        }
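Note that this Detect() keeps every region above confThreshold without non-maximum suppression, so overlapping boxes survive. A pruning sketch using the same DnnInvoke.NMSBoxes call as Example #6 (the 0.4 NMS threshold is borrowed from there):

        public DetectedObject[] DetectPruned(Mat image)
        {
            DetectedObject[] objects = Detect(image);
            Rectangle[] boxes  = objects.Select(o => o.Region).ToArray();          // requires System.Linq
            float[]     scores = objects.Select(o => (float)o.Confident).ToArray();
            int[] keep = DnnInvoke.NMSBoxes(boxes, scores, 0.5f, 0.4f);
            return keep.Select(i => objects[i]).ToArray();
        }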
Example #17
        public DnnPage()
            : base()
        {
            var button = this.GetButton();

            button.Text     = "Perform Mask-rcnn Detection";
            button.Clicked += OnButtonClicked;

            OnImagesLoaded += async(sender, image) =>
            {
                if (image == null || image[0] == null)
                {
                    return;
                }
                SetMessage("Please wait...");
                SetImage(null);

                Task <Tuple <Mat, String, long> > t = new Task <Tuple <Mat, String, long> >(
                    () =>
                {
                    InitDetector();
                    String msg = String.Empty;
                    using (Mat blob = DnnInvoke.BlobFromImage(image[0]))
                        using (VectorOfMat tensors = new VectorOfMat())
                        {
                            _maskRcnnDetector.SetInput(blob, "image_tensor");
                            Stopwatch watch = Stopwatch.StartNew();
                            _maskRcnnDetector.Forward(tensors, new string[] { "detection_out_final", "detection_masks" });
                            watch.Stop();
                            msg = String.Format("Mask RCNN inception completed in {0} milliseconds.",
                                                watch.ElapsedMilliseconds);

                            using (Mat boxes = tensors[0])
                                using (Mat masks = tensors[1])
                                {
                                    System.Drawing.Size imgSize = image[0].Size;
                                    float[,,,] boxesData        = boxes.GetData(true) as float[, , , ];
                                    int numDetections           = boxesData.GetLength(2);
                                    for (int i = 0; i < numDetections; i++)
                                    {
                                        float score = boxesData[0, 0, i, 2];

                                        if (score > 0.5)
                                        {
                                            int classId     = (int)boxesData[0, 0, i, 1];
                                            String label    = _labels[classId];
                                            MCvScalar color = _colors[classId];
                                            float left      = boxesData[0, 0, i, 3] * imgSize.Width;
                                            float top       = boxesData[0, 0, i, 4] * imgSize.Height;
                                            float right     = boxesData[0, 0, i, 5] * imgSize.Width;
                                            float bottom    = boxesData[0, 0, i, 6] * imgSize.Height;

                                            RectangleF rectF = new RectangleF(left, top, right - left, bottom - top);
                                            Rectangle rect   = Rectangle.Round(rectF);
                                            rect.Intersect(new Rectangle(Point.Empty, imgSize));
                                            CvInvoke.Rectangle(image[0], rect, new MCvScalar(0, 0, 0, 0), 1);
                                            CvInvoke.PutText(image[0], label, rect.Location, FontFace.HersheyComplex, 1.0,
                                                             new MCvScalar(0, 0, 255), 2);

                                            int[] masksDim = masks.SizeOfDimension;
                                            using (Mat mask = new Mat(
                                                       masksDim[2],
                                                       masksDim[3],
                                                       DepthType.Cv32F,
                                                       1,
                                                       masks.GetDataPointer(i, classId),
                                                       masksDim[3] * masks.ElementSize))
                                                using (Mat maskLarge = new Mat())
                                                    using (Mat maskLargeInv = new Mat())
                                                        using (Mat subRegion = new Mat(image[0], rect))
                                                            using (Mat largeColor = new Mat(subRegion.Size, Emgu.CV.CvEnum.DepthType.Cv8U, 3))
                                                            {
                                                                CvInvoke.Resize(mask, maskLarge, rect.Size);

                                                                //give the mask at least 30% transparency
                                                                using (ScalarArray sa = new ScalarArray(0.7))
                                                                    CvInvoke.Min(sa, maskLarge, maskLarge);

                                                                //Create the inverse mask for the original image
                                                                using (ScalarArray sa = new ScalarArray(1.0))
                                                                    CvInvoke.Subtract(sa, maskLarge, maskLargeInv);

                                                                //The mask color; BlendLinear computes color*mask + original*(1-mask)
                                                                largeColor.SetTo(color);
                                                                if (subRegion.NumberOfChannels == 4)
                                                                {
                                                                    using (Mat bgrSubRegion = new Mat())
                                                                    {
                                                                        CvInvoke.CvtColor(subRegion, bgrSubRegion, ColorConversion.Bgra2Bgr);
                                                                        CvInvoke.BlendLinear(largeColor, bgrSubRegion, maskLarge, maskLargeInv, bgrSubRegion);
                                                                        CvInvoke.CvtColor(bgrSubRegion, subRegion, ColorConversion.Bgr2Bgra);
                                                                    }
                                                                }
                                                                else
                                                                {
                                                                    CvInvoke.BlendLinear(largeColor, subRegion, maskLarge, maskLargeInv, subRegion);
                                                                }
                                                            }
                                        }
                                    }
                                }
                        }
                    long time = watch.ElapsedMilliseconds;

                    return(new Tuple <Mat, String, long>(image[0], msg, time));
                });
                t.Start();

                var result = await t;
                SetImage(result.Item1);
                //String computeDevice = CvInvoke.UseOpenCL ? "OpenCL: " + Ocl.Device.Default.Name : "CPU";

                SetMessage(result.Item2);
            };
        }
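
For context, here is a minimal sketch of how the boxes/masks tensor pair consumed above is typically produced with Emgu CV's DNN module. The model file names and the output layer names ("detection_out_final", "detection_masks") are assumptions based on the public TensorFlow Mask R-CNN Inception v2 COCO graph, not part of the sample itself.

        private static void RunMaskRcnn(Mat inputImage)
        {
            //Hypothetical local copies of the frozen graph and its OpenCV text config
            using (Net net = DnnInvoke.ReadNetFromTensorflow(
                       "frozen_inference_graph.pb",
                       "mask_rcnn_inception_v2_coco.pbtxt"))
            using (Mat blob = DnnInvoke.BlobFromImage(inputImage, 1.0,
                       new Size(inputImage.Width, inputImage.Height),
                       new MCvScalar(), swapRB: true, crop: false))
            using (VectorOfMat tensors = new VectorOfMat())
            {
                net.SetInput(blob);
                //tensors[0]: boxes [1 x 1 x N x 7]; tensors[1]: masks [N x classes x 15 x 15]
                net.Forward(tensors, new string[] { "detection_out_final", "detection_masks" });
            }
        }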
        /// <summary>
        /// Detect vehicles from the given image
        /// </summary>
        /// <param name="image">The image</param>
        /// <returns>The detected vehicles.</returns>
        public Vehicle[] Detect(IInputArray image)
        {
            float vehicleConfidenceThreshold      = 0.5f;
            float licensePlateConfidenceThreshold = 0.5f;


            double    scale   = 1.0;
            MCvScalar meanVal = new MCvScalar();

            List <Vehicle>      vehicles = new List <Vehicle>();
            List <LicensePlate> plates   = new List <LicensePlate>();

            using (InputArray iaImage = image.GetInputArray())
                using (Mat iaImageMat = iaImage.GetMat())
                    foreach (DetectedObject vehicleOrPlate in _vehicleLicensePlateDetectionModel.Detect(image, 0.0f, 0.0f))
                    {
                        Rectangle region = vehicleOrPlate.Region;

                        if (vehicleOrPlate.ClassId == 1 && vehicleOrPlate.Confident > vehicleConfidenceThreshold)
                        {
                            //this is a vehicle
                            Vehicle v = new Vehicle();
                            v.Region = region;

                            #region find out the type and color of the vehicle

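                            //The attribute recognizer (e.g. OpenVINO vehicle-attributes-recognition-barrier-0039)
                            //returns two blobs: index 0 = color probabilities, index 1 = type probabilities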
                            using (Mat vehicle = new Mat(iaImageMat, region))
                                using (VectorOfMat vm = new VectorOfMat(2))
                                {
                                    _vehicleAttrRecognizerModel.Predict(vehicle, vm);
                                    //_vehicleAttrRecognizer.Forward(vm, new string[] { "color", "type" });
                                    using (Mat vehicleColorMat = vm[0])
                                        using (Mat vehicleTypeMat = vm[1])
                                        {
                                            float[] vehicleColorData = vehicleColorMat.GetData(false) as float[];
                                            float   maxProbColor     = vehicleColorData.Max();
                                            int     maxIdxColor      = Array.IndexOf(vehicleColorData, maxProbColor);
                                            v.Color = _colorName[maxIdxColor];
                                            float[] vehicleTypeData = vehicleTypeMat.GetData(false) as float[];
                                            float   maxProbType     = vehicleTypeData.Max();
                                            int     maxIdxType      = Array.IndexOf(vehicleTypeData, maxProbType);
                                            v.Type = _vehicleType[maxIdxType];
                                        }
                                }
                            #endregion

                            vehicles.Add(v);
                        }
                        else if (vehicleOrPlate.ClassId == 2 && vehicleOrPlate.Confident > licensePlateConfidenceThreshold)
                        {
                            //this is a license plate
                            LicensePlate p = new LicensePlate();
                            p.Region = region;

                            #region OCR on license plate
                            using (Mat plate = new Mat(iaImageMat, region))
                            {
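                                //The OCR network (e.g. OpenVINO license-plate-recognition-barrier-0001)
                                //expects a 94x24 BGR blob on its "data" input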
                                using (Mat inputBlob = DnnInvoke.BlobFromImage(
                                           plate,
                                           scale,
                                           new Size(94, 24),
                                           meanVal,
                                           false,
                                           false,
                                           DepthType.Cv32F))
                                {
                                    _ocr.SetInput(inputBlob, "data");
                                    using (Mat output = _ocr.Forward("decode"))
                                    {
                                        float[]       plateValue = output.GetData(false) as float[];
                                        StringBuilder licensePlateStringBuilder = new StringBuilder();
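                                        //each output value is a character index into _plateText;
                                        //the foreach cast truncates float to int, and negative
                                        //values mark the end of the sequence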
                                        foreach (int j in plateValue)
                                        {
                                            if (j >= 0)
                                            {
                                                licensePlateStringBuilder.Append(_plateText[j]);
                                            }
                                        }

                                        p.Text = licensePlateStringBuilder.ToString();
                                    }
                                }
                            }
                            #endregion

                            plates.Add(p);
                        }
                    }

            foreach (LicensePlate p in plates)
            {
                foreach (Vehicle v in vehicles)
                {
                    if (v.ContainsPlate(p))
                    {
                        v.LicensePlate = p;
                        break;
                    }
                }
            }

            return(vehicles.ToArray());
        }
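
A hypothetical usage sketch for the Detect method above. The detector variable, the image path, and the console output format are illustrative; the class name follows Emgu CV's sample model (VehicleLicensePlateDetector), and LicensePlate is assumed to be a reference type.

        private static void PrintVehicles(VehicleLicensePlateDetector detector)
        {
            using (Mat frame = CvInvoke.Imread("street.jpg")) //hypothetical test image
            {
                foreach (Vehicle v in detector.Detect(frame))
                {
                    String plate = v.LicensePlate == null ? "(no plate)" : v.LicensePlate.Text;
                    Console.WriteLine("{0} {1} at {2}: {3}", v.Color, v.Type, v.Region, plate);
                }
            }
        }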