Example #1
        /// <summary>
        /// Calculate the rotation of an object relative to the previously scanned blobs
        /// </summary>
        /// <param name="colorBS">color source image the blob was detected in</param>
        /// <param name="currentBlob">blob whose rotation should be determined</param>
        /// <param name="viBlobs">list of previously scanned blobs</param>
        /// <returns>rotation angle in degrees (360 if no match was found)</returns>
        public static float GetRotation(Image <Bgra, byte> colorBS, BlobObject currentBlob, List <BlobObject> viBlobs)
        {
            Image <Bgra, byte> observedImage = UtilitiesImage.CropImage(colorBS, currentBlob.Rect);
            //observedImage.Save("current.jpg");
            float degree = 360;

            if (viBlobs.Count == 0)
            {
                return(degree);
            }
            else
            {
                VectorOfKeyPoint keypoints1;
                VectorOfKeyPoint keypoints2;
                Matrix <float>   symMatches;
                foreach (BlobObject viblob in viBlobs)
                {
                    //viblob.Image.Save(viblob.Id + ".jpg");
                    bool isDetect = UtilitiesImage.MatchIsSame(viblob.Image, observedImage.Convert <Gray, byte>(), out keypoints1, out keypoints2, out symMatches);

                    if (isDetect)
                    {
                        degree = UtilitiesImage.GetRotationDiff(symMatches, keypoints1, keypoints2);
                    }
                }
                return(degree);
            }
        }
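
A minimal sketch of how GetRotation might be called, for illustration only: it assumes the call happens inside the same helper class, that the usual usings are in place, and that the blob list and cropped color image come from the BlobManager/CameraManager singletons used in the later examples.

        // Illustrative sketch only -- colorBS and the blob list are assumed to be
        // produced elsewhere (e.g. by BlobManager.FindAllBlob, see Example #11).
        List <BlobObject> knownBlobs = BlobManager.Instance.MasterBlob;
        Image <Bgra, byte> colorBS   = CameraManager.Instance.ColorImageCropped;

        foreach (BlobObject blob in knownBlobs)
        {
            float angle = GetRotation(colorBS, blob, knownBlobs);
            if (angle < 360)   // 360 is returned when no match was found
            {
                Console.WriteLine("Blob " + blob.Name + " rotated by " + angle + " degrees");
            }
        }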
Example #2
        internal void DrawColorFrame(Image <Bgra, byte> pColorImage)
        {
            m_Image.Width  = pColorImage.Width;
            m_Image.Height = pColorImage.Height;

            UtilitiesImage.ToImage(m_Image, pColorImage);
        }
Example #3
        /// <summary>
        /// Check whether two images show the same object
        /// </summary>
        /// <param name="sourceImage">image to match against</param>
        /// <param name="toCompare">image to compare with sourceImage</param>
        /// <returns>true if the object was recognized</returns>
        public bool RecognizeObject(Image <Gray, byte> sourceImage, Image <Gray, byte> toCompare)
        {
            bool isDetect = false;

            // MFunk: Run the matching a couple of times to make the detection more robust
            int numRuns = 3;

            for (int i = 0; i < numRuns; i++)
            {
                isDetect = UtilitiesImage.MatchIsSame(toCompare, sourceImage);

                if (isDetect)
                {
                    break;
                }
            }

            // Fall back to matching on Canny edge images -- this sometimes succeeds
            // when the direct match does not
            if (!isDetect)
            {
                isDetect = UtilitiesImage.MatchIsSame(toCompare.Canny(10, 50), sourceImage.Canny(10, 50));
            }

            return(isDetect);
        }
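
As Example #6 shows, this instance method is exposed through ObjectDetectionManager.Instance. A minimal call could look like the sketch below; the file names are placeholders and the snippet assumes two grayscale crops of roughly the same object are available.

        // Illustrative sketch only -- the image paths are placeholders.
        Image <Gray, byte> reference = new Image <Gray, byte>("object_template.jpg");
        Image <Gray, byte> candidate = new Image <Gray, byte>("current_crop.jpg");

        bool found = ObjectDetectionManager.Instance.RecognizeObject(reference, candidate);
        Console.WriteLine(found ? "Object recognized" : "No match");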
Example #4
        /// <summary>
        /// Event handler for the Kinect sensor's allFramesReady event
        /// </summary>
        /// <param name="pSource">object sending the event</param>
        /// <param name="pColorImage">full color frame</param>
        /// <param name="pColorImageCropped">cropped color frame</param>
        /// <param name="pDepthImage">full depth frame</param>
        /// <param name="pDepthImageCropped">cropped depth frame</param>
        void Instance_allFramesReady(object pSource, Image <Bgra, byte> pColorImage, Image <Bgra, byte> pColorImageCropped, Image <Gray, Int32> pDepthImage, Image <Gray, Int32> pDepthImageCropped)
        {
            Dispatcher.Invoke(System.Windows.Threading.DispatcherPriority.Normal,
                              new Action(() => {
                if (!gui_inizialized)
                {
                    // set the image size once
                    m_Image.Height  = pColorImage.Height;
                    m_Image.Width   = pColorImage.Width;
                    gui_inizialized = true;
                }
                pColorImage.Draw(new Rectangle(m_RectAssemblyArea.X, m_RectAssemblyArea.Y, m_RectAssemblyArea.Width, m_RectAssemblyArea.Height), new Bgra(0, 255, 0, 0), 0);
                pColorImage.Draw(new Rectangle(m_RectDrawingArea.X, m_RectDrawingArea.Y, m_RectDrawingArea.Width, m_RectDrawingArea.Height), new Bgra(255, 0, 0, 0), 0);

                UtilitiesImage.ToImage(m_Image, pColorImage);
            })
                              );
        }
Example #5
 public void ProcessFrame(Image <Bgra, byte> pColorImage, Image <Bgra, byte> pColorImageCropped,
                          Image <Gray, Int32> pDepthImage, Image <Gray, Int32> pDepthImageCropped)
 {
     if (pColorImage != null)
     {
         m_ColorImage = pColorImage;
         UtilitiesImage.ToImage(this.KinectRGBImage, CameraManager.Instance.ColorImage);
     }
     if (pColorImageCropped != null)
     {
         UtilitiesImage.ToImage(this.KinectRGBCropImage, CameraManager.Instance.ColorImageCropped);
     }
     if (pDepthImage != null)
     {
         UtilitiesImage.ToImage(this.KinectDepthImage, CameraManager.Instance.DepthImage);
     }
     if (pDepthImageCropped != null)
     {
         UtilitiesImage.ToImage(this.KinectDepthCropImage, CameraManager.Instance.DepthImageCropped);
     }
 }
Example #6
        // Code to be called from within the main ProccessFrame method
        public void Object_ProccessFrame_Draw(bool hasToUpdateUI, Image <Bgra, Byte> pImage)
        {
            if (m_TakeScreenShotFromZone)
            {
                if (m_SelectedZone != null)
                {
                    long now = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;
                    if (now - m_ScreenshotTakenTimestamp > 200) // take picture after 200ms - frame should be gone by then
                    {
                        // crop image
                        Rectangle boundingBox = new Rectangle(m_SelectedZone.X, m_SelectedZone.Y, m_SelectedZone.Width, m_SelectedZone.Height);
                        pImage.ROI = boundingBox;

                        ObjectDetectionManager.Instance.SaveAndAddObjectToDatabase(pImage);

                        SceneManager.Instance.DisableObjectScenes = false;
                        m_TakeScreenShotFromZone = false;
                        m_SelectedZone           = null;

                        CvInvoke.cvResetImageROI(pImage);
                    }
                }
            }

            // first clear all the feedback from previous frame
            SceneManager.Instance.TemporaryObjectsTextScene.Clear();

            // should we check for objects?
            if (SettingsManager.Instance.Settings.ObjectsRecognizeObject &&
                ObjectDetectionManager.Instance.CurrentLayout != null &&
                hasToUpdateUI)
            {
                // walk over all zones
                foreach (ObjectDetectionZone zone in ObjectDetectionManager.Instance.CurrentLayout.ObjectDetectionZones)
                {
                    // crop image
                    Rectangle boundingBox = new Rectangle(zone.X, zone.Y, zone.Width, zone.Height);
                    pImage.ROI = boundingBox;
                    Image <Gray, byte> grayscaleImage = pImage.Copy().Convert <Gray, byte>();
                    CvInvoke.cvResetImageROI(pImage);

                    // walk over all objects
                    foreach (TrackableObject obj in Database.DatabaseManager.Instance.Objects)
                    {
                        if (ObjectDetectionManager.Instance.RecognizeObject(obj.EmguImage, grayscaleImage))
                        {
                            // object recognized: trigger the corresponding workflow event
                            WorkflowManager.Instance.OnObjectRecognized(obj);

                            // update last seen timestamp
                            obj.LastSeenTimeStamp = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;
                            obj.LastSeenZoneId    = zone.Id;

                            // display visual feedback
                            Scene.SceneText textItem = ObjectDetectionManager.Instance.createSceneTextHeadingObjectDetectionZone(zone, obj.Name);
                            SceneManager.Instance.TemporaryObjectsTextScene.Add(textItem);
                        }
                    }
                }
            }

            CvInvoke.cvResetImageROI(pImage);
            if (ObjectDetectionManager.Instance.CurrentLayout != null)
            {
                if (SettingsManager.Instance.Settings.ObjectsVisualFeedbackDisplay && (ObjectDetectionManager.Instance.CurrentLayout.ObjectDetectionZones.Count != 0))
                {
                    //write boxes
                    MCvFont font = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_SIMPLEX, 0.5, 0.5);
                    foreach (ObjectDetectionZone z in ObjectDetectionManager.Instance.CurrentLayout.ObjectDetectionZones)
                    {
                        // draw ID
                        pImage.Draw(z.Id + "", ref font, new System.Drawing.Point(z.X, z.Y), new Bgra(0, 0, 0, 0));
                        // draw Frame
                        if (z.wasRecentlyTriggered())
                        {
                            pImage.Draw(new Rectangle(z.X, z.Y, z.Width, z.Height), new Bgra(0, 255, 255, 0), 0);
                        }
                        else
                        {
                            pImage.Draw(new Rectangle(z.X, z.Y, z.Width, z.Height), new Bgra(255, 255, 255, 0), 0);
                        }
                    }
                }

                UtilitiesImage.ToImage(image, pImage);

                if (SettingsManager.Instance.Settings.ObjectsVisualFeedbackProject)
                {
                    SceneManager.Instance.TemporaryObjectsScene.Clear();
                    // add a temporary scene for each box
                    foreach (ObjectDetectionZone z in ObjectDetectionManager.Instance.CurrentLayout.ObjectDetectionZones)
                    {
                        // false = call from the display loop
                        SceneManager.Instance.TemporaryObjectsScene.Add(ObjectDetectionManager.Instance.createSceneBoxForObjectDetectionZone(z, false));
                    }
                }
                else
                {
                    SceneManager.Instance.TemporaryObjectsScene.Clear();
                }
            }
        }
Example #7
        public void ProcessBoxProjection(Image <Bgra, byte> pColorBitSource)
        {
            if (SettingsManager.Instance.Settings.CheckBoxStartProjection == false)
            {
                return;
            }

            Image <Gray, Int32> depthImg = KinectManager.Instance.GetCurrentDepthImageCropped();

            int[,] map = new int[SettingsManager.Instance.Settings.IntegerUpDownXBox, SettingsManager.Instance.Settings.IntegerUpDownYBox];
            BlobManager.Instance.MasterBlob = BlobManager.FindAllBlob(
                depthImg,
                BlobManager.Instance.MasterBlob,
                pColorBitSource,
                SettingsManager.Instance.Settings.BlobRadio);
            Image <Bgra, byte> outputImage = depthImg.Convert <Bgra, byte>();
            Tuple <RectangleF, System.Windows.Media.Color>             outputFree     = new Tuple <RectangleF, System.Windows.Media.Color>(new RectangleF(), new System.Windows.Media.Color());
            List <Tuple <PointF[], System.Windows.Media.Color> >       outputBoxPoint = new List <Tuple <PointF[], System.Windows.Media.Color> >();
            List <Tuple <PointF, String, System.Windows.Media.Color> > outputString   = new List <Tuple <PointF, String, System.Windows.Media.Color> >();

            Bgra color = new Bgra(0, 255, 0, 0);

            if (BlobManager.Instance.MasterBlob != null)
            {
                for (int i = 0; i < BlobManager.Instance.MasterBlob.Count; i++)
                {
                    BlobObject currentBlob = BlobManager.Instance.MasterBlob[i];
                    //currentBlob.Id == 0;

                    if (m_Free_Checked)
                    {
                        map = FreeSpaceManager.RenderObject(currentBlob.CornerPoints, map, depthImg.Width, depthImg.Height);
                    }

                    // map_check is going to be removed
                    if (m_Map_Checked)
                    {
                        if (BlobManager.Instance.MasterBlob[i].Hits == 0)
                        {
                            int count;
                            count = UtilitiesImage.FeaturePerObject(pColorBitSource, BlobManager.Instance.MasterBlob[i].Rect);

                            BlobManager.Instance.MasterBlob[i].Hits = count;
                        }
                        color = UtilitiesImage.MappingColor(BlobManager.Instance.MasterBlob[i].Hits);
                    }

                    if (m_Tracking_Checked)
                    {
                        System.Drawing.Point center = new System.Drawing.Point((int)(currentBlob.Center.X * depthImg.Width), (int)(currentBlob.Center.Y * depthImg.Height));
                        //outputImage.Draw(currentBlob.Name.ToString(), ref m_Font, center, new Bgr(System.Drawing.Color.Red));
                        outputString.Add(new Tuple <PointF, String, System.Windows.Media.Color>(currentBlob.Center, currentBlob.Name.ToString(), System.Windows.Media.Color.FromRgb(0, 255, 0)));
                    }

                    List <LineSegment2DF> depthboxLines = UtilitiesImage.PointsToImageLine(currentBlob.CornerPoints, depthImg.Width, depthImg.Height);
                    foreach (LineSegment2DF line in depthboxLines)
                    {
                        outputImage.Draw(line, color, 2);
                    }

                    PointF[] corner = new PointF[4];
                    for (int cur = 0; cur < 4; cur++)
                    {
                        corner[cur] = new PointF(BlobManager.Instance.MasterBlob[i].CornerPoints[cur].X, 1 - BlobManager.Instance.MasterBlob[i].CornerPoints[cur].Y);
                    }

                    outputBoxPoint.Add(new Tuple <PointF[], System.Windows.Media.Color>(corner, System.Windows.Media.Color.FromRgb((byte)color.Red, (byte)color.Green, (byte)color.Blue)));
                }
            }

            // update the reference
            ObjectDetectionManager.Instance.MasterBlob = BlobManager.Instance.MasterBlob;

            RectangleF freeSpace = new RectangleF();

            if (m_Free_Checked)
            {
                depthImg  = FreeSpaceManager.DrawMaxSubmatrix(depthImg.Convert <Bgra, byte>(), map).Convert <Gray, Int32>();
                freeSpace = FreeSpaceManager.DrawMaxSubmatrix(map, (float)(depthImg.Width / (float)SettingsManager.Instance.Settings.IntegerUpDownXBox / depthImg.Width),
                                                              (float)(depthImg.Height / (float)SettingsManager.Instance.Settings.IntegerUpDownYBox / depthImg.Height));
            }

            //draw on table
            # region projection
            if (SettingsManager.Instance.Settings.CheckBoxStartProjection)
Example #8
        private void buttonScan_Click(object sender, RoutedEventArgs e)
        {
            foreach (BlobObject blob in BlobManager.Instance.MasterBlob)
            {
                if (m_ViBlob.Count == 0)
                {
                    int[, ,] depthStructur = UtilitiesImage.ObjectDepthPoint(blob.Rect.Location,
                                                                             blob.Rect, KinectManager.Instance.GetCurrentDepthImageCropped());
                    this.m_ViBlob.Add(new BlobObject(blob.Image, depthStructur, blob.CornerPoints, blob.Rect, blob.Center, blob.Hits, BlobManager.Instance.Id, BlobManager.Instance.Id.ToString()));
                    blob.Id = BlobManager.Instance.Id;
                    BlobManager.Instance.Id++;

                    Image <Gray, Int32> testImage = new Image <Gray, int>(depthStructur.GetLength(0), depthStructur.GetLength(1));

                    Parallel.For(0, depthStructur.GetLength(1), y =>
                    {
                        for (int x = 0; x < depthStructur.GetLength(0); x++)
                        {
                            testImage.Data[y, x, 0] = depthStructur[y, x, 0];
                        }
                    });

                    Image <Gray, byte> cannyImage = testImage.Canny(10, 30);

                    // check if object dir exists
                    if (!Directory.Exists(ProjectConstants.OBJECT_DIR))
                    {
                        // if not create it
                        Directory.CreateDirectory(ProjectConstants.OBJECT_DIR);
                    }

                    // save the scanned image using milliseconds as a uid
                    long   millis    = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;
                    string imagePath = ProjectConstants.OBJECT_DIR + "\\" + millis + ".jpg";
                    blob.Image.Save(imagePath);

                    TrackableObject obj = new TrackableObject();
                    obj.Image         = imagePath;
                    obj.ImageFullPath = System.IO.Path.GetFullPath(imagePath);
                    obj.Name          = "Peter";
                    obj.Category      = "Group 1";
                    DatabaseManager.Instance.insertTrackableObject(obj);
                    DatabaseManager.Instance.listTrackableObject(); // refresh
                }
                else
                {
                    //blobId = DepthHelper.RecognizeObjekt(blob, this.viBlob);

                    //TODO: check previously saved objects here

                    if (blob.Id == 1)
                    {
                        int[, ,] depthStructur = UtilitiesImage.ObjectDepthPoint(blob.Rect.Location,
                                                                                 blob.Rect, KinectManager.Instance.GetCurrentDepthImage());
                        this.m_ViBlob.Add(new BlobObject(blob.Image, depthStructur,
                                                         blob.CornerPoints, blob.Rect, blob.Center, blob.Hits,
                                                         BlobManager.Instance.Id, BlobManager.Instance.Id.ToString()));
                        blob.Id = BlobManager.Instance.Id;
                        BlobManager.Instance.Id++;
                    }
                }
            }
        }
Example #9
        public void m_KinectConnector_allFramesReady(object pSource, Image <Bgra, Byte> pColorFrame, Image <Gray, Int16> pDepthFrame)
        {
            if (pColorFrame == null || pDepthFrame == null)
            {
                return;
            }

            OnOrgAllReady(this, pColorFrame, pDepthFrame);

            Image <Gray, Int16> depthFrameBuffer = pDepthFrame.Copy();

            if (m_KinectSettings.Ratio == 0)
            {
                m_KinectSettings.Ratio = 2;
            }

            Image <Bgra, Byte> colorFrameBuffer = pColorFrame.Resize((int)(pColorFrame.Width / m_KinectSettings.Ratio), (int)(pColorFrame.Height / m_KinectSettings.Ratio), Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);


            int xS = (int)(m_KinectSettings.XScale / m_KinectSettings.Ratio);
            int yS = (int)(m_KinectSettings.YScale / m_KinectSettings.Ratio);
            int xD = 0;
            int yD = 0;
            int xC = 0;
            int yC = 0;
            int w  = 0;
            int h  = depthFrameBuffer.Height;

            // depth image
            // what is the new width?
            if (xS < 0 && (depthFrameBuffer.Width + xS > colorFrameBuffer.Width))
            {
                // sticks out on both the left and the right -- should never happen
                w  = colorFrameBuffer.Width;
                xD = -xS;
                xC = 0;
            }
            else if (depthFrameBuffer.Width + xS > colorFrameBuffer.Width)
            {
                // sticks out on the right
                w  = colorFrameBuffer.Width - xS;
                xD = 0;
                xC = xS; // xS is positive
            }
            else if (xS < 0)
            {
                // sticks out on the left
                w  = depthFrameBuffer.Width + xS;
                xD = -xS;
                xC = 0;
            }
            else
            {
                // completely inside
                w  = depthFrameBuffer.Width;
                xD = 0;
                xC = xS;
            }

            // what is the new height?
            if (yS < 0 && (depthFrameBuffer.Height + yS > colorFrameBuffer.Height))
            {
                // sticks out at both the top and the bottom
                h  = colorFrameBuffer.Height;
                yD = -yS;
                yC = 0;
            }
            else if (depthFrameBuffer.Height + yS > colorFrameBuffer.Height)
            {
                // sticks out at the bottom only
                h  = colorFrameBuffer.Height - yS;
                yD = 0;
                yC = yS;
            }
            else if (yS < 0)
            {
                // sticks out at the top only
                h  = depthFrameBuffer.Height + yS;
                yD = -yS;
                yC = 0;
            }
            else
            {
                // completely inside
                h  = depthFrameBuffer.Height;
                yD = 0;
                yC = yS;
            }

            // set the regions of interest on the depth and color buffers
            depthFrameBuffer.ROI = new System.Drawing.Rectangle(xD, yD, w, h);
            colorFrameBuffer.ROI = new System.Drawing.Rectangle(xC, yC, w, h);

            m_ImageSize = new System.Drawing.Size(w, h);

            if (colorImg == null || colorImg.Width != w || colorImg.Height != h)
            {
                colorImg = new Image <Bgra, Byte>(w, h);
            }
            if (m_DepthImg == null || m_DepthImg.Width != w || m_DepthImg.Height != h)
            {
                m_DepthImg = new Image <Gray, Int32>(w, h);
            }

            colorImg = colorFrameBuffer.Convert <Bgra, Byte>().Copy();
            CvInvoke.cvResetImageROI(colorFrameBuffer);

            colorImgCropped = UtilitiesImage.CropImage(colorImg, m_AssemblyArea);
            // colorImgCropped = colorImg;
            Image <Gray, Int32> newDepthImgCropped = UtilitiesImage.CropImage(m_DepthImg, m_AssemblyArea);

            // Image<Gray, Int32> newDepthImgCropped = m_DepthImg;


            m_DepthImg = depthFrameBuffer.Convert <Gray, float>().SmoothGaussian(5).Convert <Gray, Int32>();
            // m_DepthImg = pDepthFrame.Convert<Gray, float>().Convert<Gray, Int32>();

            // Smoothing
            if (m_SmoothingOn)
            {
                /*this.m_SmoothingFilter.InnerBandThreshold = (int)InnerBandThresholdInput.Value;
                 * this.m_SmoothingFilter.OuterBandThreshold = (int)OuterBandThresholdInput.Value;
                 * this.m_SmoothingAverage.AverageFrameCount = (int)AverageFrameCountInput.Value;
                 * this.m_SmoothingMaximum.MaximumFrameCount = (int)AverageFrameCountInput.Value;*/

                //depthImg = this.m_SmoothingFilter.CreateFilteredDepthArray(pDepthFrame, m_KinectConnector.GetDepthFrameDescription().Width, m_KinectConnector.GetDepthFrameDescription().Height);
                //depthImg = this.m_SmoothingMaximum.CreateMaximumDepthArray(depthPixel, depthFrame.Width, depthFrame.Height);
                m_DepthImgCropped = this.m_SmoothingAverage.CreateAverageDepthArray(newDepthImgCropped);
            }
            else
            {
                m_DepthImgCropped = newDepthImgCropped;
            }


            OnAllFramesReady(this, colorImg, colorImgCropped, m_DepthImg, m_DepthImgCropped);

            // event driven update
            CameraManager.Instance.SetImages(colorImg, colorImgCropped, m_DepthImg, m_DepthImgCropped);
            CameraManager.Instance.SetOrgImages(pSource, pColorFrame, pDepthFrame.Convert <Gray, Int32>());
        }
Example #10
        /// <summary>
        /// Find an object in an object list
        /// </summary>
        /// <param name="currentBlob">blob to identify</param>
        /// <param name="dbObjects">trackable objects stored in the database</param>
        /// <returns>name of the recognized object, or "not identified"</returns>
        public static string RecognizeObject(BlobObject currentBlob, List <TrackableObject> dbObjects)
        {
            bool   isDetect = false;
            string blobId   = "not identified";

            bool careAboutWorkflow = false;

            if (WorkflowManager.Instance.LoadedWorkflow != null)
            {
                careAboutWorkflow = true;
            }

            if (dbObjects.Count != 0)
            {
                foreach (TrackableObject obj in dbObjects)
                {
                    if (careAboutWorkflow)
                    {
                        // first check if object belongs to current workingstep
                        if (obj.Category == "" + WorkflowManager.Instance.CurrentWorkingStep.StepNumber)
                        {
                            // MFunk: Run the matching a couple of times to make the detection more robust
                            int numRuns = 3;

                            for (int i = 0; i < numRuns; i++)
                            {
                                isDetect = UtilitiesImage.MatchIsSame(obj.EmguImage, currentBlob.Image);

                                if (isDetect)
                                {
                                    break;
                                }
                            }

                            if (isDetect)
                            {
                                WorkflowManager.Instance.OnObjectRecognized(obj);
                                blobId = obj.Name;
                                break;
                            }
                            else
                            {
                                isDetect = UtilitiesImage.MatchIsSame(obj.EmguImage.Canny(10, 50).Convert <Gray, Int32>(), currentBlob.Image.Canny(10, 50).Convert <Gray, Int32>());
                                if (isDetect)
                                {
                                    blobId   = obj.Name;
                                    WorkflowManager.Instance.OnObjectRecognized(obj);
                                    break;
                                }
                                else
                                {
                                    blobId = "not identified";
                                }
                            }

                            if (isDetect)
                            {
                                m_LastWasCorrupted = false;
                            }
                        }
                    }
                    else
                    {
                        // do not care about workflow

                        // MFunk: Run the matching a couple of times to make the detection more robust
                        int numRuns = 3;

                        for (int i = 0; i < numRuns; i++)
                        {
                            isDetect = UtilitiesImage.MatchIsSame(obj.EmguImage, currentBlob.Image);

                            if (isDetect)
                            {
                                break;
                            }
                        }

                        if (isDetect)
                        {
                            WorkflowManager.Instance.OnObjectRecognized(obj);
                            blobId = obj.Name;
                            break;
                        }
                        else
                        {
                            isDetect = UtilitiesImage.MatchIsSame(obj.EmguImage.Canny(10, 50).Convert <Gray, Int32>(), currentBlob.Image.Canny(10, 50).Convert <Gray, Int32>());
                            if (isDetect)
                            {
                                blobId   = obj.Name;
                                WorkflowManager.Instance.OnObjectRecognized(obj);
                                break;
                            }
                            else
                            {
                                blobId = "not identified";
                            }
                        }
                    }
                }
            }

            if (isDetect)
            {
                m_LastWasCorrupted = false;
            }
            return(blobId);
        }
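
A short sketch of how this static overload could be driven from the current blob list; it is only an illustration and assumes the call is made from inside the same class (as FindAllBlob does in Example #11), that System.Linq is in scope, and that the blobs and object database are already populated.

        // Illustrative sketch only -- assumes MasterBlob and the object database
        // are already populated (see Examples #7 and #11).
        List <TrackableObject> dbObjects = Database.DatabaseManager.Instance.Objects.ToList();

        foreach (BlobObject blob in BlobManager.Instance.MasterBlob)
        {
            string name = RecognizeObject(blob, dbObjects);
            if (name != "not identified")
            {
                Console.WriteLine("Blob matched database object: " + name);
            }
        }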
Example #11
        /// <summary>
        /// Find all blobs in a depth image and match them against the previously tracked blobs
        /// </summary>
        /// <param name="openCVImg">depth image to search for blobs</param>
        /// <param name="masterBlobs">blobs tracked in the previous frame</param>
        /// <param name="colorBS">color image used to crop out each blob</param>
        /// <param name="mode">false = use the cvBlob detector, true = use contour detection</param>
        /// <returns>list of blobs found in the current frame</returns>
        public static List <BlobObject> FindAllBlob(Image <Gray, Int32> openCVImg,
                                                    List <BlobObject> masterBlobs,
                                                    Image <Bgra, byte> colorBS,
                                                    bool mode)
        {
            List <BlobObject> retList = new List <BlobObject>();

            try
            {
                Image <Gray, byte> gray_image = openCVImg.Convert <Gray, byte>();
                List <BlobObject>  newBlobs   = new List <BlobObject>();
                if (mode == false)
                {
                    #region using cvBlob
                    Emgu.CV.Cvb.CvBlobs        resultingBlobs = new Emgu.CV.Cvb.CvBlobs();
                    Emgu.CV.Cvb.CvBlobDetector bDetect        = new Emgu.CV.Cvb.CvBlobDetector();
                    uint numWebcamBlobsFound = bDetect.Detect(gray_image, resultingBlobs);

                    using (MemStorage stor = new MemStorage())
                    {
                        foreach (Emgu.CV.Cvb.CvBlob targetBlob in resultingBlobs.Values)
                        {
                            if (targetBlob.Area > 200)
                            {
                                var contour = targetBlob.GetContour(stor);

                                MCvBox2D box = contour.GetMinAreaRect();

                                PointF[] boxCorner = UtilitiesImage.ToPercent(contour.GetMinAreaRect().GetVertices(), gray_image.Width, gray_image.Height);

                                PointF center = UtilitiesImage.ToPercent(contour.GetMinAreaRect().center, gray_image.Width, gray_image.Height);

                                RectangleF rect = UtilitiesImage.ToPercent(targetBlob.BoundingBox, gray_image.Width, gray_image.Height);

                                Image <Gray, byte> newCropImg = UtilitiesImage.CropImage(colorBS.Convert <Gray, byte>(), rect);
                                newBlobs.Add(new BlobObject(newCropImg, null, boxCorner, rect, center, 0, 0, 0 + ""));
                                //stor.Clear();
                            }
                        }
                    }
                    #endregion
                }
                else
                {
                    #region using contour
                    using (MemStorage storage = new MemStorage())
                    {
                        //Find only the outer contours (CV_RETR_EXTERNAL ignores holes)
                        Contour <System.Drawing.Point> contours = gray_image.FindContours(
                            Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                            Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_EXTERNAL,
                            storage);

                        for (int i = 0; contours != null; contours = contours.HNext)
                        {
                            i++;

                            //double area = contours.Area;
                            if (contours.Area > 200)
                            {
                                PointF[]           boxCorner  = UtilitiesImage.ToPercent(contours.GetMinAreaRect().GetVertices(), gray_image.Width, gray_image.Height);
                                PointF             center     = UtilitiesImage.ToPercent(contours.GetMinAreaRect().center, gray_image.Width, gray_image.Height);
                                RectangleF         rect       = UtilitiesImage.ToPercent(contours.BoundingRectangle, gray_image.Width, gray_image.Height);
                                Image <Bgra, byte> newCropImg = UtilitiesImage.CropImage(colorBS, rect);
                                newBlobs.Add(new BlobObject(newCropImg.Convert <Gray, byte>(), null, boxCorner, rect, center, 0, 0, 0 + ""));
                            }
                        }
                    }
                    #endregion
                }

                // read objects from database now
                List <TrackableObject> objects = DatabaseManager.Instance.Objects.ToList();

                if (objects.Count == 0)
                {
                    foreach (BlobObject b in newBlobs)
                    {
                        retList.Add(new BlobObject(b.Image, null, b.CornerPoints, b.Rect, b.Center, 0, 0, 0 + ""));
                    }
                }
                else
                {
                    #region

                    // compare size and position of the new blobs against the master blobs
                    List <Tuple <double, double, int, int> > trackInfo = new List <Tuple <double, double, int, int> >();
                    for (int newblob = 0; newblob < newBlobs.Count; newblob++)
                    {
                        for (int master = 0; master < masterBlobs.Count; master++)
                        {
                            double d = UtilitiesImage.Distance(newBlobs[newblob].Center, masterBlobs[master].Center);
                            double s = UtilitiesImage.DiffSize(newBlobs[newblob].Rect.Size, masterBlobs[master].Rect.Size);
                            trackInfo.Add(new Tuple <double, double, int, int>(d, s, master, newblob));
                        }
                    }


                    trackInfo.Sort((x, y) => x.Item1.CompareTo(y.Item1));
                    List <int> newItem = new List <int>();

                    if (!m_LastWasCorrupted)
                    {
                        while (trackInfo.Count != 0)
                        {
                            if (trackInfo[0].Item2 < 0.2)
                            {
                                int        masterId  = trackInfo[0].Item3;
                                int        newId     = trackInfo[0].Item4;
                                BlobObject newObject = new BlobObject(newBlobs[newId].Image, newBlobs[newId].DepthStructur, newBlobs[newId].CornerPoints, newBlobs[newId].Rect, newBlobs[newId].Center, masterBlobs[masterId].Hits, masterBlobs[masterId].Id, masterBlobs[masterId].Name);
                                retList.Add(newObject);
                                trackInfo.RemoveAll(item => item.Item3 == masterId);
                                trackInfo.RemoveAll(item => item.Item4 == newId);
                                newItem.Add(newId);
                            }
                            else
                            {
                                trackInfo.RemoveAt(0);
                            }
                        }
                        newItem.Sort();
                        //}


                        // check the images based on their features
                        for (int i = newBlobs.Count - 1; i >= 0; i--)
                        {
                            if (newItem.Count != 0 && i == newItem.Last())
                            {
                                newItem.RemoveAt(newItem.Count - 1);
                            }
                            else
                            {
                                // set the name according to the recognized object
                                string currentBlobId = RecognizeObject(newBlobs[i], objects);
                                newBlobs[i].Name = currentBlobId;
                                retList.Add(newBlobs[i]);
                            }
                        }
                    }
                }
            }
            catch (CvException e)
            {
                Logger.Instance.Log(e.Message, Logger.LoggerState.ERROR);
                return(retList);
            }
            catch (Exception e)
            {
                Logger.Instance.Log(e.Message, Logger.LoggerState.ERROR);
                //mark a flag that the last frame was corrupt
                m_LastWasCorrupted = true;
            }
            #endregion

            return(retList);
        }
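
Example #7 already shows how FindAllBlob is wired into the projection pipeline; stripped down to the essentials, and assuming the Kinect/camera singletons are initialized (the cropped color image is used here as a stand-in for the color source), the call looks roughly like this:

        // Illustrative sketch only -- mirrors the call in Example #7.
        Image <Gray, Int32> depthImg = KinectManager.Instance.GetCurrentDepthImageCropped();

        BlobManager.Instance.MasterBlob = BlobManager.FindAllBlob(
            depthImg,
            BlobManager.Instance.MasterBlob,
            CameraManager.Instance.ColorImageCropped,
            SettingsManager.Instance.Settings.BlobRadio);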
Example #12
        /// <summary>
        /// Event handler for the Kinect sensor's allOrgFramesReady event
        /// </summary>
        /// <param name="pSource">object sending the event</param>
        /// <param name="pColorImage">original color frame</param>
        /// <param name="pDepthImage">original depth frame</param>
        void Instance_allOrgFramesReady(object pSource, Image <Bgra, byte> pColorImage, Image <Gray, Int32> pDepthImage)
        {
            Dispatcher.Invoke(DispatcherPriority.DataBind,
                              new Action(() =>
            {
                Image <Bgra, byte> tempDepth = pDepthImage.Convert <Bgra, byte>();
                tempDepth = tempDepth.Resize(sliderDepthRatio.Value, INTER.CV_INTER_LINEAR);

                Image <Bgra, byte> tempColor = pColorImage.Copy();
                int xS = (int)sliderDepthX.Value;
                int yS = (int)sliderDepthY.Value;
                int xD = 0;
                int yD = 0;
                int xC = 0;
                int yC = 0;
                int w  = tempDepth.Width;
                int h  = tempDepth.Height;

                if (xS >= 0 && tempColor.Width < tempDepth.Width + xS)
                {
                    // sticks out on the right, inside on the left
                    w -= tempDepth.Width + xS - tempColor.Width;
                    xC = xS;
                }
                else if (xS < 0 && tempColor.Width < tempDepth.Width + xS)
                {
                    // sticks out on both the left and the right
                    w  = tempColor.Width;
                    xD = -xS;
                }
                else if (xS < 0)
                {
                    // sticks out on the left
                    xD = -xS;
                    w += xS;
                }
                else
                {
                    // horizontally inside
                    xC = xS;
                }

                if (yS >= 0 && tempColor.Height < tempDepth.Height + yS)
                {
                    // sticks out at the bottom, inside at the top
                    h -= tempDepth.Height + yS - tempColor.Height;
                    yC = yS;
                }
                else if (yS < 0 && tempColor.Height < tempDepth.Height + yS)
                {
                    // sticks out at both the top and the bottom
                    h  = tempColor.Height;
                    yD = -yS;
                }
                else if (yS < 0)
                {
                    // sticks out at the top
                    h += yS;
                    yD = -yS;
                }
                else
                {
                    // vertically inside
                    yC = yS;
                }

                try
                {
                    // set the regions of interest
                    tempDepth.ROI = new Rectangle(xD, yD, w, h);
                    tempColor.ROI = new Rectangle(xC, yC, w, h);

                    // copy the cropped depth image onto the color image
                    tempDepth.CopyTo(tempColor);

                    BoundsOkay = true;
                }
                catch (CvException)
                {
                    //Prevent crash when offsets are out of bounds
                    BoundsOkay = false;
                }

                // reset the region of interest
                tempColor.ROI = new Rectangle(0, 0, pColorImage.Width, pColorImage.Height);

                UtilitiesImage.ToImage(m_Image, tempColor);
            })
                              );
        }