public List<Object> findObjects(VideoReader videoReader, IDepthReader depthReader, Action<ushort[], CameraSpacePoint[]> mappingFunction, IProgress<int> progress)
        {
            List<Object> objects = new List<Object>();
            var patternNameToFrames = new Dictionary<string, List<int>>();

            // Enumerate candidate pattern images once rather than on every frame
            string[] patterns = Directory.GetFiles(patternDirectory);

            for (int i = 0; i < videoReader.frameCount; i++)
            {

                foreach (var pattern in patterns)
                {
                    if (pattern.EndsWith(".png"))
                    {
                        long matchTime;
                        using (Mat modelImage = CvInvoke.Imread(pattern, LoadImageType.Grayscale))
                        using (Mat observedImage = videoReader.getFrame(i))
                        using (Mat grayImage = new Mat())
                        {
                            // Frames decoded via OpenCV are normally BGR; if this
                            // VideoReader follows that convention, Bgr2Gray would be
                            // the more accurate conversion here
                            CvInvoke.CvtColor(observedImage, grayImage, ColorConversion.Rgb2Gray);
                            Mat homography;
                            VectorOfKeyPoint modelKeyPoints;
                            VectorOfKeyPoint observedKeyPoints;
                            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                            {
                                Mat mask;
                                DrawMatches.FindMatch(modelImage, grayImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                                out mask, out homography);

                                if (homography != null)
                                {
                                    //draw a rectangle along the projected model
                                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                                    System.Drawing.PointF[] pts = new System.Drawing.PointF[]
                                    {
                                      new System.Drawing.PointF(rect.Left, rect.Bottom),
                                      new System.Drawing.PointF(rect.Right, rect.Bottom),
                                      new System.Drawing.PointF(rect.Right, rect.Top),
                                      new System.Drawing.PointF(rect.Left, rect.Top)
                                    };
                                    pts = CvInvoke.PerspectiveTransform(pts, homography);

                                    // NOTE: the projected corners are computed here but never
                                    // added to 'objects', so this method returns an empty list
                                    Point[] points = Array.ConvertAll(pts, Point.Round);
                                }
                            }
                        }
                    }
                }
            }

            return objects;
        }
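DrawMatches.FindMatch hands back the homography, the match mask, and both key-point vectors as out parameters, and none of them are disposed above. A minimal cleanup sketch, assuming it runs at the end of the inner using block where those names are in scope:

        // Dispose the unmanaged outputs of DrawMatches.FindMatch once the
        // projected corners have been extracted.
        modelKeyPoints.Dispose();
        observedKeyPoints.Dispose();
        mask.Dispose();
        if (homography != null)
            homography.Dispose();   // null when no match was found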
Example #2
        public static async Task<List<Object>> DetectObjects(String info, VideoReader videoReader,
                                                                IDepthReader depthReader,
                                                                List<IObjectRecogAlgo> objectRecognizers,
                                                                Dictionary<IObjectRecogAlgo, bool> objectRecognizerIncluded,
                                                                Action<ushort[], CameraSpacePoint[]> mappingFunction)
        {
            List<Object> detectedObjects = new List<Object>();

            int numberOfSteps = (videoReader.frameCount + 1) * objectRecognizerIncluded.Values.Count(v => v);

            if (numberOfSteps == 0) return detectedObjects;

            ProgressForm pf = new ProgressForm();
            pf.Text = info;
            pf.progressBar.Maximum = 100;
            pf.progressBar.Step = 1;

            var progress2 = new Progress<int>(v =>
            {
                // This lambda is executed in context of UI thread,
                // so it can safely update form controls
                int percent = (int)((v + 1) * 100.0 / numberOfSteps);
                pf.progressBar.Value = Math.Min(percent, 100);
                if (v + 1 < numberOfSteps)
                {
                    pf.description.Text = "Processing frame " + v;
                }
                else
                {
                    // The final report closes the modal progress dialog
                    pf.description.Text = "Saving detected objects";
                    pf.Dispose();
                }
            });

            Task t = Task.Run(() =>
            {
                int recognizerCounter = 0;
                foreach (var objectRecognizer in objectRecognizers)
                {
                    if (objectRecognizerIncluded[objectRecognizer])
                    {
                        // Capture the current counter value; Progress<T> posts its
                        // handler asynchronously, so the lambda could otherwise read
                        // recognizerCounter after it has been incremented
                        int recognizerIndex = recognizerCounter;
                        var progress = new Progress<int>(v =>
                        {
                            (progress2 as IProgress<int>).Report(recognizerIndex * (videoReader.frameCount + 1) + v);
                        });

                        if (videoReader != null && depthReader != null)
                        {
                            var objects = objectRecognizer.findObjects(videoReader, depthReader, mappingFunction, progress);
                            detectedObjects.AddRange(objects);
                        }

                        recognizerCounter++;
                    }
                }
            });

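            // ShowDialog blocks until the final progress report disposes the form;
            // awaiting the task afterwards surfaces any exception from the worker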
            pf.StartPosition = FormStartPosition.CenterParent;
            pf.ShowDialog();

            await t;

            return detectedObjects;
        }
Example #3
        private async void handlePlayButtonOn()
        {
            recordMode = RecordMode.Playingback;
            saveRecordedSession.Focus();
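            // NOTE: Wait() blocks the UI thread until recording has finished flushing;
            // if finishRecording is a Task, awaiting it here would avoid the block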
            finishRecording.Wait();

            playButton.ImageIndex = 3;

            playBackRgbLock = new object();
            rgbBoard.playbackLock = playBackRgbLock;

            // Re-open the temporary rgb file for playback

            videoReader = new VideoReader(tempRgbFileName, (int)lastWrittenRgbTime.TotalMilliseconds);
            rgbBoard.mat = videoReader.getFrame(0);

            if (rgbBoard.mat == null)
            {
                // Could not decode the first frame; abort playback setup
                return;
            }
            rgbBoard.Image = rgbBoard.mat.Bitmap;

            rgbPlaybackFrameNo = videoReader.frameCount;
            int frameWidth = videoReader.frameWidth;
            int frameHeight = videoReader.frameHeight;

            depthReader = new BaseDepthReader(tempDepthFileName);
            depthFrame = depthReader.getFrameCount();
            depthWidth = depthReader.getWidth();
            depthHeight = depthReader.getHeight();
            depthBitmap = new Bitmap(depthWidth, depthHeight, PixelFormat.Format32bppRgb);
            depthValuesToByte = new byte[depthWidth * depthHeight * 4];

            endTimeLabel.Text = string.Format("{0}:{1:00}.{2:000}",
                (int)lastWrittenRgbTime.TotalMinutes, lastWrittenRgbTime.Seconds, lastWrittenRgbTime.Milliseconds);

            // Set values for playBar
            playBar.Enabled = true;
            playBar.Minimum = 1;
            playBar.Maximum = rgbPlaybackFrameNo;
            playBar.Value = 1;

            StringBuilder sb = new StringBuilder();

            sb.Append("Temporary rgb file has written " + rgbStreamedFrame + " frames \n");
            sb.Append("Temporary rgb file has " + rgbPlaybackFrameNo + " frames of size = ( " + frameWidth + " , " + frameHeight + " ) \n");
            sb.Append("Temporary depth file has " + depthFrame + " frames of size = ( " + depthWidth + " , " + depthHeight + " ) \n");
            sb.Append("RecordingTime " + lastWrittenRgbTime + "\n");

            if (rigDetected.Value)
            {
                sb.Append("Rig(s) detected.\n");
            }
            else
            {
                sb.Append("No rig detected.\n");
            }

            detectedObjects = await Utils.DetectObjects("Progress on capturing", videoReader, depthReader, objectRecognizers, objectRecognizerIncluded, this.coordinateMapper.MapColorFrameToCameraSpace);

            Console.WriteLine("In playback " + detectedObjects.Count);

            sb.Append(detectedObjects.Count + " objects detected.\n");

            helperTextBox.Text = sb.ToString();

            main.Invalidate();
        }
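The handler above allocates depthBitmap and depthValuesToByte but never fills them. A minimal sketch of turning one depth frame into the 32bpp display buffer, assuming a simple grayscale mapping; the helper name and depth cap are illustrative, not part of the original code:

        // Sketch: convert ushort depth values (millimeters) into BGRA bytes
        // for depthBitmap. maxDisplayDepth is an assumed display cap.
        private void fillDepthDisplayBuffer(int frameTimeMs)
        {
            const int maxDisplayDepth = 4500;
            ushort[] depthValues = depthReader.readFrameAtTime(frameTimeMs);
            for (int i = 0; i < depthValues.Length; i++)
            {
                byte intensity = (byte)(255 * Math.Min((int)depthValues[i], maxDisplayDepth) / maxDisplayDepth);
                depthValuesToByte[4 * i] = intensity;     // blue
                depthValuesToByte[4 * i + 1] = intensity; // green
                depthValuesToByte[4 * i + 2] = intensity; // red
                depthValuesToByte[4 * i + 3] = 0;         // unused in Format32bppRgb
            }
        }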
Example #4
        private void saveRecordedSession_Click(object sender, EventArgs e)
        {
            Project currentProject = main.currentProject;

            if (currentProject == null)
            {
                FolderBrowserDialog fbd = new FolderBrowserDialog();
                DialogResult folderResult = fbd.ShowDialog(main);
                if (folderResult == DialogResult.OK)
                {
                    string pathToFolder = fbd.SelectedPath;

                    foreach (String fileName in new[] { tempRgbFileName, tempDepthFileName, tempConfigFileName })
                    {
                        string dstFileName = Path.Combine(pathToFolder, mapFileName[fileName]);
                        if (!File.Exists(dstFileName))
                            File.Copy(fileName, dstFileName);
                    }
                }
                return;
            }

            var result = MessageBox.Show(main, "Do you want to add the captured session to project " + currentProject.name +
                "? Choose Yes to add it, or No to save it into a separate folder.", "Save session", MessageBoxButtons.YesNoCancel);

            switch (result)
            {
                case DialogResult.Yes:
                    SessionInfo sessionInfo = new SessionInfo(main, currentProject.name);
                    sessionInfo.StartPosition = FormStartPosition.CenterParent;
                    if (videoReader != null)
                    {
                        videoReader.Dispose();
                        videoReader = null;
                    }

                    if (depthReader != null)
                    {
                        depthReader.Dispose();
                        depthReader = null;
                    }

                    sessionInfo.okButton.Click += this.addSessionOkClick;
                    sessionInfo.ShowDialog();
                    break;
                case DialogResult.No:
                    FolderBrowserDialog fbd = new FolderBrowserDialog();
                    DialogResult folderResult = fbd.ShowDialog(main);
                    if (folderResult == DialogResult.OK)
                    {
                        string pathToFolder = fbd.SelectedPath;

                        foreach (String fileName in new[] { tempRgbFileName, tempDepthFileName, tempConfigFileName })
                        {
                            string dstFileName = Path.Combine(pathToFolder, mapFileName[fileName]);
                            Console.WriteLine("Copying to " + dstFileName);
                            if (!File.Exists(dstFileName))
                                File.Copy(fileName, dstFileName);
                        }
                    }
                    break;
                case DialogResult.Cancel:
                    break;
                default:
                    break;
            }

            // Back to annotating
            main.tabs.SelectedIndex = 0;
        }
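The folder-copy loop appears verbatim in both the no-project branch and the DialogResult.No branch. It could be factored into a small shared helper; a sketch, assuming the temp-file fields and mapFileName are in scope:

        // Sketch of a shared helper for the duplicated copy loop above.
        private void copySessionFiles(string pathToFolder)
        {
            foreach (string fileName in new[] { tempRgbFileName, tempDepthFileName, tempConfigFileName })
            {
                string dstFileName = Path.Combine(pathToFolder, mapFileName[fileName]);
                if (!File.Exists(dstFileName))
                    File.Copy(fileName, dstFileName);
            }
        }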
Example #5
        private void handlePlayButtonOff()
        {
            playButton.ImageIndex = 2;

            recordMode = RecordMode.None;
            optionsTable.Enabled = true;

            tmspStartRecording = null;

            helperTextBox.Text = "";

            if (videoReader != null)
            {
                videoReader.Dispose();
                videoReader = null;
            }

            if (depthReader != null)
            {
                depthReader.Dispose();
                depthReader = null;
            }
            endTimeLabel.Text = "00:00.000";

            main.Invalidate();
        }
Example #6
        public List<Object> findObjects(VideoReader videoReader, IDepthReader depthReader, Action<ushort[], CameraSpacePoint[]> mappingFunction, IProgress<int> progress)
        {
            var shapeOptimizer = new FlatAnglesOptimizer(160);
            Console.WriteLine("Find glyph box");
            List<Object> objects = new List<Object>();

            if (videoReader == null)
                return objects;

            // recognizedGlyphs maps:
            //   boxPrototypeIndex -> frameNo -> faceIndex -> a tuple of
            //     - the bounding points of the recognized glyph in image space
            //     - the matched GlyphFace instance
            //     - the corresponding 3D points from the depth mapping
            var recognizedGlyphs = new Dictionary<int, Dictionary<int, Dictionary<int, Tuple<List<System.Drawing.PointF>, GlyphFace, List<Point3>>>>>();

            Bitmap image = null;
            Mat m = null;
            Bitmap grayImage = null;
            Bitmap edges = null;
            UnmanagedImage grayUI = null;
            Bitmap transformed = null;
            Bitmap transformedOtsu = null;


            // True when a glyph was detected in the previous frame. While set, and
            // unless the current frame is an anchor frame, the search is restricted
            // to a neighborhood of the previously detected glyphs.
            bool previousFrameDetection = false;

            for (int frameNo = 0; frameNo < videoReader.frameCount; frameNo++)
            {
                if (progress != null)
                    progress.Report(frameNo);

                Console.WriteLine("=============================================");
                Console.WriteLine("Frame no " + frameNo);
                m = videoReader.getFrame(frameNo);
                if (m == null)
                {
                    break;
                }

                var startPos = new System.Drawing.Point();

                getImageForProcessing(recognizedGlyphs, m, previousFrameDetection, frameNo, ref image, ref startPos);

                // Reset right after using
                previousFrameDetection = false;


                Stopwatch stopwatch = Stopwatch.StartNew();

                // Adapted from Glyph Recognition Prototyping
                // Copyright © Andrew Kirillov, 2009-2010

                // 1 - Grayscale
                grayImage = Grayscale.CommonAlgorithms.BT709.Apply(image);

                stopwatch.Stop();
                Console.WriteLine("Gray scale time = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

                // 2 - Edge detection
                DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
                edges = edgeDetector.Apply(grayImage);

                stopwatch.Stop();
                Console.WriteLine("Edge detection time = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

                // 3 - Threshold edges
                // A threshold of 20 detected too few glyphs, so it was raised to 60
                Threshold thresholdFilter = new Threshold(60);
                thresholdFilter.ApplyInPlace(edges);

                stopwatch.Stop();
                Console.WriteLine("Threshold time = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

                // 4 - Blob Counter
                BlobCounter blobCounter = new BlobCounter();
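                // Filter out blobs smaller than 32x32 pixels and examine the largest first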
                blobCounter.MinHeight = 32;
                blobCounter.MinWidth = 32;
                blobCounter.FilterBlobs = true;
                blobCounter.ObjectsOrder = ObjectsOrder.Size;

                blobCounter.ProcessImage(edges);
                Blob[] blobs = blobCounter.GetObjectsInformation();

                stopwatch.Stop();
                Console.WriteLine("Blob finding time = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

                // Get an unmanaged copy of the grayscale image so its pixel values can be accessed
                grayUI = UnmanagedImage.FromManagedImage(grayImage);

                // list of found dark/black quadrilaterals surrounded by white area
                List<List<IntPoint>> foundObjects = new List<List<IntPoint>>();
                // shape checker for checking quadrilaterals
                SimpleShapeChecker shapeChecker = new SimpleShapeChecker();

                Console.WriteLine("edgePoints");

                // 5 - check each blob
                for (int i = 0, n = blobs.Length; i < n; i++)
                {
                    List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);

                    List<IntPoint> corners = null;

                    // does it look like a quadrilateral ?
                    if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
                    {
                        // more checks could be done here to filter out unacceptable shapes
                        // if ( CheckIfShapeIsAcceptable( corners ) )
                        {

                            // get edge points on the left and on the right side
                            List<IntPoint> leftEdgePoints, rightEdgePoints;
                            blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

                            // calculate average difference between pixel values from outside of the shape and from inside
                            float diff = this.CalculateAverageEdgesBrightnessDifference(
                                leftEdgePoints, rightEdgePoints, grayUI);

                            // check average difference, which tells how much outside is lighter than inside on the average
                            if (diff > 20)
                            {
                                //Drawing.Polygon(imageData, corners, Color.FromArgb(255, 255, 0, 0));
                                // add the object to the list of interesting objects for further processing
                                foundObjects.Add(corners);
                            }
                        }
                    }
                }

                stopwatch.Stop();
                Console.WriteLine("Finding black quadiralateral surrounded by white area = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();


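                // Interpolate this frame's recording timestamp so the matching depth frame can be looked up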
                int recordedTimeForRgbFrame = (int)(videoReader.totalMiliTime * frameNo / (videoReader.frameCount - 1));

                CameraSpacePoint[] csps = new CameraSpacePoint[videoReader.frameWidth * videoReader.frameHeight];
                if (depthReader != null)
                {
                    ushort[] depthValues = depthReader.readFrameAtTime(recordedTimeForRgbFrame);
                    mappingFunction(depthValues, csps);
                }

                stopwatch.Stop();
                Console.WriteLine("Mapping into 3 dimensional = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();


                // further processing of each potential glyph
                foreach (List<IntPoint> corners in foundObjects)
                {
                    Console.WriteLine("found some corner");
                    // 6 - do quadrilateral transformation
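                    // Warp to a square of 20 px per glyph cell (glyphSize + 2 cells including the border)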
                    QuadrilateralTransformation quadrilateralTransformation =
                        new QuadrilateralTransformation(corners, 20 * (glyphSize + 2), 20 * (glyphSize + 2));

                    transformed = quadrilateralTransformation.Apply(grayImage);

                    // 7 - otsu thresholding
                    OtsuThreshold otsuThresholdFilter = new OtsuThreshold();
                    transformedOtsu = otsuThresholdFilter.Apply(transformed);

                    // +2 accounts for the one-cell border on each side of the glyph
                    int glyphSizeWithBoundary = glyphSize + 2;
                    SquareBinaryGlyphRecognizer gr = new SquareBinaryGlyphRecognizer(glyphSizeWithBoundary);

                    bool[,] glyphValues = gr.Recognize(ref transformedOtsu,
                        new Rectangle(0, 0, 20 * (glyphSize + 2), 20 * (glyphSize + 2)));

                    // Strip the border cells to recover the inner glyph values
                    bool[,] resizedGlyphValues = new bool[glyphSize, glyphSize];

                    for (int i = 0; i < glyphSize; i++)
                        for (int j = 0; j < glyphSize; j++)
                        {
                            resizedGlyphValues[i, j] = glyphValues[i + 1, j + 1];
                        }


                    GlyphFace face = new GlyphFace(resizedGlyphValues, glyphSize);

                    Console.WriteLine("Find glyph face " + face.ToString());

                    // Transfer back to original coordinates
                    List<IntPoint> originalCorners = new List<IntPoint>();
                    foreach (var corner in corners)
                    {
                        IntPoint p = new IntPoint(corner.X + startPos.X, corner.Y + startPos.Y);
                        originalCorners.Add(p);
                    }

                    Console.WriteLine("Corner points");
                    foreach (var corner in originalCorners)
                    {
                        Console.WriteLine(corner);
                    }

                    for (int boxPrototypeIndex = 0; boxPrototypeIndex < boxPrototypes.Count; boxPrototypeIndex++)
                    {
                        var boxPrototype = boxPrototypes[boxPrototypeIndex];
                        foreach (int faceIndex in boxPrototype.indexToGlyphFaces.Keys)
                        {
                            if (face.Equals(boxPrototype.indexToGlyphFaces[faceIndex]))
                            {
                                if (!recognizedGlyphs.ContainsKey(boxPrototypeIndex))
                                {
                                    Console.WriteLine("Detect new type of prototype " + boxPrototypeIndex);
                                    recognizedGlyphs[boxPrototypeIndex] = new Dictionary<int, Dictionary<int, Tuple<List<System.Drawing.PointF>, GlyphFace, List<Point3>>>>();
                                }

                                if (!recognizedGlyphs[boxPrototypeIndex].ContainsKey(frameNo))
                                {
                                    Console.WriteLine("Detect glyph at frame " + frameNo + " for prototype " + boxPrototypeIndex);
                                    previousFrameDetection = true;

                                    recognizedGlyphs[boxPrototypeIndex][frameNo] = new Dictionary<int, Tuple<List<System.Drawing.PointF>, GlyphFace, List<Point3>>>();
                                }

                                recognizedGlyphs[boxPrototypeIndex][frameNo][faceIndex] = new Tuple<List<System.Drawing.PointF>, GlyphFace, List<Point3>>(
                                    originalCorners.Select(p => new System.Drawing.PointF(p.X, p.Y)).ToList(),
                                    face,
                                    depthReader != null ?
                                        originalCorners.Select(p =>
                                        {
                                            // Row-major index of this corner in the color frame
                                            int idx = p.X + p.Y * videoReader.frameWidth;
                                            return idx >= 0 && idx < csps.Length ?
                                                new Point3(csps[idx].X, csps[idx].Y, csps[idx].Z) :
                                                new Point3();
                                        }).ToList() :
                                        new List<Point3>());

                                break;
                            }
                        }
                    }
                }

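                // Release all per-frame images before moving on to the next frame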
                foreach (IDisposable o in new IDisposable[] { image, m, grayImage, edges, grayUI, transformed, transformedOtsu })
                {
                    if (o != null)
                    {
                        o.Dispose();
                    }
                }

                stopwatch.Stop();
                Console.WriteLine("Transforming and detect glyph = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

            }

            if (progress != null)
                progress.Report(videoReader.frameCount);

            if (recognizedGlyphs.Count > 0)
            {
                foreach (int boxPrototypeIndex in recognizedGlyphs.Keys)
                {
                    Console.WriteLine("For boxPrototypeIndex = " + boxPrototypeIndex + " Found glyph box at " + recognizedGlyphs[boxPrototypeIndex].Keys.Count + " frames");
                    GlyphBoxObject oneBox = null;
                    var boxPrototype = boxPrototypes[boxPrototypeIndex];
                    oneBox = new GlyphBoxObject(currentSession, "", Color.Black, 1, videoReader.fileName);
                    oneBox.boxPrototype = boxPrototype;
                    foreach (int frameNo in recognizedGlyphs[boxPrototypeIndex].Keys)
                    {
                        var glyphs = recognizedGlyphs[boxPrototypeIndex][frameNo];

                        var glyphBounds = new List<List<System.Drawing.PointF>>();
                        var glyph3DBounds = new List<List<Point3>>();
                        var faces = new List<GlyphFace>();

                        foreach (var glyph in glyphs)
                        {
                            glyphBounds.Add(glyph.Value.Item1);
                            faces.Add(glyph.Value.Item2);
                            glyph3DBounds.Add(glyph.Value.Item3);
                        }

                        oneBox.setBounding(frameNo, glyphSize, glyphBounds, faces);
                        oneBox.set3DBounding(frameNo, glyphSize, glyph3DBounds, faces);

                    }

                    objects.Add(oneBox);
                }
            }

            return objects;
        }