public List<Object> findObjects(VideoReader videoReader, IDepthReader depthReader, Action<ushort[], CameraSpacePoint[]> mappingFunction, IProgress<int> progress)
        {
            List<Object> objects = new List<Object>();
            var patternNameToFrames = new Dictionary<string, List<int>>();

            // Enumerate the pattern templates once; the set of files does not
            // change between frames, so this does not belong inside the frame loop
            string[] patterns = Directory.GetFiles(patternDirectory);

            for (int i = 0; i < videoReader.frameCount; i++)
            {

                foreach (var pattern in patterns)
                {
                    if (pattern.EndsWith(".png", StringComparison.OrdinalIgnoreCase))
                    {
                        long matchTime;
                        using (Mat modelImage = CvInvoke.Imread(pattern, LoadImageType.Grayscale))
                        using (Mat observedImage = videoReader.getFrame(i))
                        using (Mat grayImage = new Mat())
                        {
                            CvInvoke.CvtColor(observedImage, grayImage, ColorConversion.Rgb2Gray);
                            Mat homography;
                            VectorOfKeyPoint modelKeyPoints;
                            VectorOfKeyPoint observedKeyPoints;
                            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                            {
                                Mat mask;
                                DrawMatches.FindMatch(modelImage, grayImage, out matchTime, out modelKeyPoints,
                                    out observedKeyPoints, matches, out mask, out homography);

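                                // DrawMatches.FindMatch (adapted from Emgu CV's feature-matching
                                // sample) leaves homography null when too few good keypoint
                                // matches survive filtering, so non-null means a likely detection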
                                if (homography != null)
                                {
                                    // Project the model image's outline into the observed frame
                                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                                    System.Drawing.PointF[] pts = new System.Drawing.PointF[]
                                    {
                                        new System.Drawing.PointF(rect.Left, rect.Bottom),
                                        new System.Drawing.PointF(rect.Right, rect.Bottom),
                                        new System.Drawing.PointF(rect.Right, rect.Top),
                                        new System.Drawing.PointF(rect.Left, rect.Top)
                                    };
                                    pts = CvInvoke.PerspectiveTransform(pts, homography);

                                    // Rounded outline of the detected pattern in frame coordinates
                                    Point[] points = Array.ConvertAll<System.Drawing.PointF, Point>(pts, Point.Round);

                                    // Record that this pattern matched frame i
                                    string patternName = Path.GetFileName(pattern);
                                    if (!patternNameToFrames.ContainsKey(patternName))
                                    {
                                        patternNameToFrames[patternName] = new List<int>();
                                    }
                                    patternNameToFrames[patternName].Add(i);
                                }

                                // Dispose the out-params allocated by FindMatch
                                if (mask != null) mask.Dispose();
                                if (homography != null) homography.Dispose();
                                if (modelKeyPoints != null) modelKeyPoints.Dispose();
                                if (observedKeyPoints != null) observedKeyPoints.Dispose();
                            }
                        }
                    }
                }
            }

            return objects;
        }
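        /// Switches the UI into playback mode: re-reads the temporary rgb and depth
        /// files, initializes the play bar and the summary text, then runs object
        /// detection over the recorded session.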
        private async void handlePlayButtonOn()
        {
            recordMode = RecordMode.Playingback;
            saveRecordedSession.Focus();
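            // Block until the recording pipeline has finished flushing the temp files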
            finishRecording.Wait();

            playButton.ImageIndex = 3;

            playBackRgbLock = new object();
            rgbBoard.playbackLock = playBackRgbLock;

            // Re-read the temporary rgb file for playback

            videoReader = new VideoReader(tempRgbFileName, (int)lastWrittenRgbTime.TotalMilliseconds);
            rgbBoard.mat = videoReader.getFrame(0);

            if (rgbBoard.mat == null)
            {
                // Could not read the first frame; abort playback setup
                return;
            }
            rgbBoard.Image = rgbBoard.mat.Bitmap;

            rgbPlaybackFrameNo = videoReader.frameCount;
            int frameWidth = videoReader.frameWidth;
            int frameHeight = videoReader.frameHeight;

            depthReader = new BaseDepthReader(tempDepthFileName);
            depthFrame = depthReader.getFrameCount();
            depthWidth = depthReader.getWidth();
            depthHeight = depthReader.getHeight();
            depthBitmap = new Bitmap(depthWidth, depthHeight, PixelFormat.Format32bppRgb);
            depthValuesToByte = new byte[depthWidth * depthHeight * 4];

            endTimeLabel.Text = (int)lastWrittenRgbTime.TotalMinutes + ":" + lastWrittenRgbTime.Seconds.ToString("00") + "." + lastWrittenRgbTime.Milliseconds.ToString("000");

            // Set values for playBar
            playBar.Enabled = true;
            playBar.Minimum = 1;
            playBar.Maximum = rgbPlaybackFrameNo;
            playBar.Value = 1;

            StringBuilder sb = new StringBuilder();

            sb.Append("Temporary rgb file has written " + rgbStreamedFrame + " frames \n");
            sb.Append("Temporary rgb file has " + rgbPlaybackFrameNo + " frames of size = ( " + frameWidth + " , " + frameHeight + " ) \n");
            sb.Append("Temporary depth file has " + depthFrame + " frames of size = ( " + depthWidth + " , " + depthHeight + " ) \n");
            sb.Append("RecordingTime " + lastWrittenRgbTime + "\n");

            if (rigDetected.Value)
            {
                sb.Append("Rig(s) detected.\n");
            }
            else
            {
                sb.Append("No rig detected.\n");
            }

            detectedObjects = await Utils.DetectObjects("Progress on detecting objects", videoReader, depthReader, objectRecognizers, objectRecognizerIncluded, this.coordinateMapper.MapColorFrameToCameraSpace);

            Console.WriteLine("In playback " + detectedObjects.Count);

            sb.Append(detectedObjects.Count + " object(s) detected.\n");

            helperTextBox.Text = sb.ToString();

            main.Invalidate();
        }
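        /// Detects glyph boxes in every frame of the video.
        /// Pipeline (adapted from Andrew Kirillov's glyph recognition prototyping):
        /// grayscale -> edge detection -> thresholding -> blob extraction ->
        /// quadrilateral check -> quadrilateral transformation -> Otsu thresholding ->
        /// glyph recognition, then matching faces against the known box prototypes.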
        public List<Object> findObjects(VideoReader videoReader, IDepthReader depthReader, Action<ushort[], CameraSpacePoint[]> mappingFunction, IProgress<int> progress)
        {
            var shapeOptimizer = new FlatAnglesOptimizer(160);
            Console.WriteLine("Find glyph box");
            List<Object> objects = new List<Object>();

            if (videoReader == null)
                return objects;

            /// recognizedGlyphs maps:
            ///     box prototype index (int boxPrototypeIndex)
            ///       -> frame number (int frameNo)
            ///         -> face index of the recognized glyph (int faceIndex)
            ///           -> a tuple of:
            ///                -    the bounding points of the recognized glyph (image coordinates)
            ///                -    the recognized GlyphFace instance
            ///                -    the 3D camera-space points of the corners (empty without a depth reader)
            var recognizedGlyphs = new Dictionary<int, Dictionary<int, Dictionary<int, Tuple<List<System.Drawing.PointF>, GlyphFace, List<Point3>>>>>();

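            // Intermediate images are declared outside the frame loop so they can be
            // disposed together at the end of each iteration (see the cleanup block below)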
            Bitmap image = null;
            Mat m = null;
            Bitmap grayImage = null;
            Bitmap edges = null;
            UnmanagedImage grayUI = null;
            Bitmap transformed = null;
            Bitmap transformedOtsu = null;


            // Control flag: true if some glyph was detected in the previous frame.
            // When true (and the current frame is not an anchor frame), the glyph box
            // search is restricted to a neighborhood of the previous detections.
            bool previousFrameDetection = false;

            for (int frameNo = 0; frameNo < videoReader.frameCount; frameNo++)
            {
                if (progress != null)
                    progress.Report(frameNo);

                Console.WriteLine("=============================================");
                Console.WriteLine("Frame no " + frameNo);
                m = videoReader.getFrame(frameNo);
                if (m == null)
                {
                    break;
                }

                var startPos = new System.Drawing.Point();

                getImageForProcessing(recognizedGlyphs, m, previousFrameDetection, frameNo, ref image, ref startPos);

                // Reset right after use; set again below if a glyph is detected in this frame
                previousFrameDetection = false;


                Stopwatch stopwatch = Stopwatch.StartNew();

                /// Adapted from Glyph Recognition Prototyping
                /// Copyright © Andrew Kirillov, 2009-2010
                ///
                // 1 - Grayscale
                grayImage = Grayscale.CommonAlgorithms.BT709.Apply(image);

                stopwatch.Stop();
                Console.WriteLine("Gray scale time = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

                // 2 - Edge detection
                DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
                edges = edgeDetector.Apply(grayImage);

                stopwatch.Stop();
                Console.WriteLine("Edge detection time = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

                // 3 - Threshold edges
                // A threshold of 20 left too few detected glyphs, so it was raised to 60
                Threshold thresholdFilter = new Threshold(60);
                thresholdFilter.ApplyInPlace(edges);

                stopwatch.Stop();
                Console.WriteLine("Threshold time = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

                // 4 - Blob Counter
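                // Keep only blobs at least 32x32 px, ordered largest-first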
                BlobCounter blobCounter = new BlobCounter();
                blobCounter.MinHeight = 32;
                blobCounter.MinWidth = 32;
                blobCounter.FilterBlobs = true;
                blobCounter.ObjectsOrder = ObjectsOrder.Size;

                blobCounter.ProcessImage(edges);
                Blob[] blobs = blobCounter.GetObjectsInformation();

                stopwatch.Stop();
                Console.WriteLine("Blob finding time = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

                //// create unmanaged copy of source image, so we could draw on it
                //UnmanagedImage imageData = UnmanagedImage.FromManagedImage(image);

                // Get unmanaged copy of the grayscale image so we can access its pixel values
                grayUI = UnmanagedImage.FromManagedImage(grayImage);

                // list of found dark/black quadrilaterals surrounded by white area
                List<List<IntPoint>> foundObjects = new List<List<IntPoint>>();
                // shape checker for checking quadrilaterals
                SimpleShapeChecker shapeChecker = new SimpleShapeChecker();

                Console.WriteLine("edgePoints");

                // 5 - check each blob
                for (int i = 0, n = blobs.Length; i < n; i++)
                {
                    List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);

                    List<IntPoint> corners = null;

                    // does it look like a quadrilateral?
                    if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
                    {
                        // do some more checks to filter out unacceptable shapes
                        // if ( CheckIfShapeIsAcceptable( corners ) )
                        {

                            // get edge points on the left and on the right side
                            List<IntPoint> leftEdgePoints, rightEdgePoints;
                            blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

                            // calculate average difference between pixel values from outside of the shape and from inside
                            float diff = this.CalculateAverageEdgesBrightnessDifference(
                                leftEdgePoints, rightEdgePoints, grayUI);

                            // check the average difference, which tells how much lighter the
                            // outside is than the inside on average
                            if (diff > 20)
                            {
                                //Drawing.Polygon(imageData, corners, Color.FromArgb(255, 255, 0, 0));
                                // add the object to the list of interesting objects for further processing
                                foundObjects.Add(corners);
                            }
                        }
                    }
                }

                stopwatch.Stop();
                Console.WriteLine("Finding black quadiralateral surrounded by white area = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();


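                // Interpolate this rgb frame's timestamp linearly over the total recording
                // time, then map the depth frame nearest that time to 3D camera-space
                // points (one CameraSpacePoint per rgb pixel)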
                int recordedTimeForRgbFrame = videoReader.frameCount > 1 ?
                    (int)(videoReader.totalMiliTime * frameNo / (videoReader.frameCount - 1)) : 0;

                CameraSpacePoint[] csps = new CameraSpacePoint[videoReader.frameWidth * videoReader.frameHeight];
                if (depthReader != null)
                {
                    ushort[] depthValues = depthReader.readFrameAtTime(recordedTimeForRgbFrame);
                    mappingFunction(depthValues, csps);
                }

                stopwatch.Stop();
                Console.WriteLine("Mapping into 3 dimensional = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();


                // further processing of each potential glyph
                foreach (List<IntPoint> corners in foundObjects)
                {
                    Console.WriteLine("found some corner");
                    // 6 - do quadrilateral transformation
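                    // Warp the detected quadrilateral to an axis-aligned square,
                    // 20 px per glyph cell including the boundary cells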
                    QuadrilateralTransformation quadrilateralTransformation =
                        new QuadrilateralTransformation(corners, 20 * (glyphSize + 2), 20 * (glyphSize + 2));

                    transformed = quadrilateralTransformation.Apply(grayImage);

                    // 7 - otsu thresholding
                    OtsuThreshold otsuThresholdFilter = new OtsuThreshold();
                    transformedOtsu = otsuThresholdFilter.Apply(transformed);

                    // +2 cells for the one-cell boundary on each side of the glyph
                    int glyphSizeWithBoundary = glyphSize + 2;
                    SquareBinaryGlyphRecognizer gr = new SquareBinaryGlyphRecognizer(glyphSizeWithBoundary);

                    bool[,] glyphValues = gr.Recognize(ref transformedOtsu,
                        new Rectangle(0, 0, 20 * glyphSizeWithBoundary, 20 * glyphSizeWithBoundary));

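                    // Strip the one-cell boundary ring, keeping the inner glyphSize x glyphSize payload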
                    bool[,] resizedGlyphValues = new bool[glyphSize, glyphSize];

                    for (int i = 0; i < glyphSize; i++)
                        for (int j = 0; j < glyphSize; j++)
                        {
                            resizedGlyphValues[i, j] = glyphValues[i + 1, j + 1];
                        }


                    GlyphFace face = new GlyphFace(resizedGlyphValues, glyphSize);

                    Console.WriteLine("Find glyph face " + face.ToString());

                    // Transfer back to original coordinates
                    List<IntPoint> originalCorners = new List<IntPoint>();
                    foreach (var corner in corners)
                    {
                        IntPoint p = new IntPoint(corner.X + startPos.X, corner.Y + startPos.Y);
                        originalCorners.Add(p);
                    }

                    Console.WriteLine("Corner points");
                    foreach (var corner in originalCorners)
                    {
                        Console.WriteLine(corner);
                    }

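                    // Match the recognized face against every face of every known box prototype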
                    for (int boxPrototypeIndex = 0; boxPrototypeIndex < boxPrototypes.Count; boxPrototypeIndex++)
                    {
                        var boxPrototype = boxPrototypes[boxPrototypeIndex];
                        foreach (int faceIndex in boxPrototype.indexToGlyphFaces.Keys)
                        {
                            if (face.Equals(boxPrototype.indexToGlyphFaces[faceIndex]))
                            {
                                if (!recognizedGlyphs.ContainsKey(boxPrototypeIndex))
                                {
                                    Console.WriteLine("Detect new type of prototype " + boxPrototypeIndex);
                                    recognizedGlyphs[boxPrototypeIndex] = new Dictionary<int, Dictionary<int, Tuple<List<System.Drawing.PointF>, GlyphFace, List<Point3>>>>();
                                }

                                if (!recognizedGlyphs[boxPrototypeIndex].ContainsKey(frameNo))
                                {
                                    Console.WriteLine("Detect glyph at frame " + frameNo + " for prototype " + boxPrototypeIndex);
                                    if (!previousFrameDetection)
                                    {
                                        previousFrameDetection = true;
                                    }

                                    recognizedGlyphs[boxPrototypeIndex][frameNo] = new Dictionary<int, Tuple<List<System.Drawing.PointF>, GlyphFace, List<Point3>>>();
                                }

                                recognizedGlyphs[boxPrototypeIndex][frameNo][faceIndex] = new Tuple<List<System.Drawing.PointF>, GlyphFace, List<Point3>>(
                                    originalCorners.Select(p => new System.Drawing.PointF(p.X, p.Y)).ToList(),
                                    face,
                                    depthReader != null ?
                                        originalCorners.Select(p =>
                                        {
                                            // Check both coordinates explicitly; the flattened-index
                                            // check alone lets a horizontally out-of-range corner
                                            // wrap to an adjacent row
                                            if (p.X >= 0 && p.X < videoReader.frameWidth && p.Y >= 0 && p.Y < videoReader.frameHeight)
                                            {
                                                int index = p.X + p.Y * videoReader.frameWidth;
                                                return new Point3(csps[index].X, csps[index].Y, csps[index].Z);
                                            }
                                            return new Point3();
                                        }).ToList() :
                                        new List<Point3>()
                                    );

                                break;
                            }
                        }
                    }
                }

                foreach (IDisposable o in new IDisposable[] { image, m, grayImage, edges, grayUI, transformed, transformedOtsu })
                {
                    if (o != null)
                    {
                        o.Dispose();
                    }
                }

                stopwatch.Stop();
                Console.WriteLine("Transforming and detect glyph = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

            }

            if (progress != null)
                progress.Report(videoReader.frameCount);

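            // Aggregate the per-prototype detections into one GlyphBoxObject per
            // recognized box prototype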
            if (recognizedGlyphs.Keys.Count != 0)
            {
                foreach (int boxPrototypeIndex in recognizedGlyphs.Keys)
                {
                    Console.WriteLine("For boxPrototypeIndex = " + boxPrototypeIndex + " Found glyph box at " + recognizedGlyphs[boxPrototypeIndex].Keys.Count + " frames");
                    var boxPrototype = boxPrototypes[boxPrototypeIndex];
                    var oneBox = new GlyphBoxObject(currentSession, "", Color.Black, 1, videoReader.fileName);
                    oneBox.boxPrototype = boxPrototype;
                    foreach (int frameNo in recognizedGlyphs[boxPrototypeIndex].Keys)
                    {
                        var glyphs = recognizedGlyphs[boxPrototypeIndex][frameNo];

                        var glyphBounds = new List<List<System.Drawing.PointF>>();
                        var glyph3DBounds = new List<List<Point3>>();
                        var faces = new List<GlyphFace>();

                        foreach (var glyph in glyphs)
                        {
                            glyphBounds.Add(glyph.Value.Item1);
                            faces.Add(glyph.Value.Item2);
                            glyph3DBounds.Add(glyph.Value.Item3);
                        }

                        oneBox.setBounding(frameNo, glyphSize, glyphBounds, faces);
                        oneBox.set3DBounding(frameNo, glyphSize, glyph3DBounds, faces);

                    }

                    objects.Add(oneBox);
                }
            }

            return objects;
        }