// Use this for initialization
        void Start()
        {
//            Utils.setDebugMode(true);

            using (Mat patternMat = Imgcodecs.imread(Application.persistentDataPath + "/patternImg.jpg")) {
                if (patternMat.total() == 0)
                {
                    patternRawImage.gameObject.SetActive(false);
                }
                else
                {
                    Imgproc.cvtColor(patternMat, patternMat, Imgproc.COLOR_BGR2RGB);

                    Texture2D patternTexture = new Texture2D(patternMat.width(), patternMat.height(), TextureFormat.RGBA32, false);

                    Utils.matToTexture2D(patternMat, patternTexture);

                    patternRawImage.texture = patternTexture;
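                    // Scale the RawImage so the displayed pattern keeps its original aspect ratio.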
                    patternRawImage.rectTransform.localScale = new Vector3(1.0f, (float)patternMat.height() / (float)patternMat.width(), 1.0f);

                    patternRawImage.gameObject.SetActive(true);
                }
            }

            webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper> ();
            webCamTextureToMatHelper.Init();


            detector = ORB.create();
            detector.setMaxFeatures(1000);
            keypoints = new MatOfKeyPoint();
        }
Example #2
    public KeyPoint[] getKeyPoints(Mat camMat, int nKeyPoints)
    {
        orb = ORB.Create(nKeyPoints);
        KeyPoint[] keyPoints = orb.Detect(camMat);

        return(keyPoints);
    }
 public CallerChain DecodeChain(byte[] encoded)
 {
     try {
         VersionedData[] versions = DecodeExportedVersions(encoded,
                                                           _magicTagCallChain);
         CallerChainImpl decodedChain = null;
         for (int i = 0; i < versions.Length; i++)
         {
             // If there are two versions, the current version comes before the legacy one.
             if (versions[i].version == ExportVersion.ConstVal)
             {
                 TypeCode signedDataTypeCode =
                     ORB.create_tc_for_type(typeof(SignedData));
                 SignedData exportedChain =
                     (SignedData)
                     _codec.decode_value(versions[i].encoded, signedDataTypeCode);
                 CallChain chain = CallerChainImpl.UnmarshalCallChain(exportedChain);
                 if (decodedChain == null)
                 {
                     decodedChain = new CallerChainImpl(chain.bus, chain.caller,
                                                        chain.target, chain.originators, exportedChain);
                 }
                 else
                 {
                     decodedChain.Signed.Chain = exportedChain;
                 }
             }
             if (versions[i].version == CurrentVersion.ConstVal)
             {
                 TypeCode exportedChainTypeCode =
                     ORB.create_tc_for_type(typeof(ExportedCallChain));
                 ExportedCallChain exportedChain =
                     (ExportedCallChain)
                     _codec.decode_value(versions[i].encoded, exportedChainTypeCode);
                 core.v2_0.services.access_control.CallChain chain =
                     CallerChainImpl.UnmarshalLegacyCallChain(exportedChain.signedChain);
                 if (decodedChain == null)
                 {
                     decodedChain = new CallerChainImpl(exportedChain.bus, chain.caller,
                                                        chain.target, chain.originators, exportedChain.signedChain);
                 }
                 else
                 {
                     decodedChain.Signed.LegacyChain = exportedChain.signedChain;
                 }
             }
         }
         if (decodedChain != null)
         {
             return(decodedChain);
         }
     }
     catch (GenericUserException e) {
         const string message =
             "Falha inesperada ao decodificar uma cadeia exportada.";
         Logger.Error(message, e);
         throw new InvalidEncodedStreamException(message, e);
     }
     throw new InvalidEncodedStreamException("Versão de cadeia incompatível.");
 }
Example #4
        public void TestSlotModifyInClientRecContextAndServer()
        {
            try {
                int slotId = m_testInterceptorInit.RequestIntercept.SlotId;
                ORB orb    = OrbServices.GetSingleton();
                omg.org.PortableInterceptor.Current current =
                    (omg.org.PortableInterceptor.Current)orb.resolve_initial_references("PICurrent");
                int contextEntryVal = 4;
                current.set_slot(slotId, contextEntryVal);

                System.Int32 arg    = 1;
                System.Int32 result = m_testService.TestAddToContextData(arg);
                Assertion.AssertEquals(arg + contextEntryVal, result);

                Assertion.Assert("service context not present", m_testInterceptorInit.RequestIntercept.HasReceivedContextElement);
                Assertion.AssertEquals("service context content", arg + contextEntryVal,
                                       m_testInterceptorInit.RequestIntercept.ContextElement.TestEntry);

                current =
                    (omg.org.PortableInterceptor.Current)orb.resolve_initial_references("PICurrent");
                Assertion.AssertEquals("slot was modified", contextEntryVal, current.get_slot(slotId));
            } finally {
                m_testInterceptorInit.RequestIntercept.ClearInvocationHistory();
            }
        }
Example #5
        public void TestORB()
        {
            ORB orb = new ORB(700);

            //String[] parameters = orb.GetParamNames();
            EmguAssert.IsTrue(TestFeature2DTracker(orb, orb), "Unable to find homography matrix");
        }
    private void Start()
    {
        webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();
        webCamTextureToMatHelper.Initialize();

        grayMat      = new Mat();
        makerGrayMat = new Mat(originMakerTexture.height, originMakerTexture.width, CvType.CV_8UC1);

        makerTexture = new Texture2D(originMakerTexture.width, originMakerTexture.height);
        Graphics.CopyTexture(originMakerTexture, makerTexture);

        detector  = ORB.create();
        extractor = ORB.create();

        // Get keypoints and descriptors of the marker image
        makerMat = new Mat(originMakerTexture.height, originMakerTexture.width, CvType.CV_8UC3);
        Utils.texture2DToMat(makerTexture, makerMat, false);
        makerKeyPoints   = new MatOfKeyPoint();
        makerDescriptors = new Mat();

        Imgproc.cvtColor(makerMat, makerGrayMat, Imgproc.COLOR_BGR2GRAY);

        detector.detect(makerGrayMat, makerKeyPoints);
        extractor.compute(makerGrayMat, makerKeyPoints, makerDescriptors);

        matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    }
Example #7
    void DetectKeypoints()
    {
        if (capture_mat == null)
        {
            int w = webcam_texture.width;
            int h = webcam_texture.height;
            capture_mat      = new Mat(h, w, CvType.CV_8UC3);
            debug_mat        = new Mat(h, w, CvType.CV_8UC3);
            result_texuture  = new Texture2D(w, h, TextureFormat.ARGB32, false);
            rawImage.texture = result_texuture;

            message.SetMessage("w=" + w + ", h=" + h);
        }

        Utils.webCamTextureToMat(webcam_texture, capture_mat);

        // Extract keypoints and compute descriptors (the ORB default is 500 features)
        ORB detector  = ORB.create(500);
        ORB extractor = ORB.create(500);

        keypoints   = new MatOfKeyPoint();
        descriptors = new Mat();

        detector.detect(capture_mat, keypoints);
        extractor.compute(capture_mat, keypoints, descriptors);

        // With ORB defaults, rows <= 500 and cols = 32 (rows = number of keypoints, cols = descriptor size in bytes)
        //Debug.Log("descriptors rows=" + descriptors.rows() + ", cols=" + descriptors.cols());
    }
        public void MathingWithLshIndexParams()
        {
            using var img1        = Image("tsukuba_left.png", ImreadModes.Grayscale);
            using var img2        = Image("tsukuba_right.png", ImreadModes.Grayscale);
            using var orb         = ORB.Create(500);
            using var descriptor1 = new Mat();
            using var descriptor2 = new Mat();
            orb.DetectAndCompute(img1, null, out _, descriptor1);
            orb.DetectAndCompute(img2, null, out _, descriptor2);

            using var indexParams = new LshIndexParams(12, 20, 2);
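            // LSH index parameters: 12 hash tables, key size 20, multi-probe level 2.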

            Assert.Equal(MatType.CV_8UC1, descriptor1.Type());
            Assert.Equal(MatType.CV_8UC1, descriptor2.Type());

            // LshIndexParams requires binary descriptors, so they must NOT be converted to CV_32F.
            //descriptor1.ConvertTo(descriptor1, MatType.CV_32F);
            //descriptor2.ConvertTo(descriptor2, MatType.CV_32F);

            using var matcher = new FlannBasedMatcher(indexParams);
            DMatch[] matches = matcher.Match(descriptor1, descriptor2);

            Assert.NotEmpty(matches);

            /*
             *  using (var view = new Mat())
             *  using (var window = new Window())
             *  {
             *      Cv2.DrawMatches(img1, keyPoints1, img2, keyPoints2, matches, view);
             *      window.ShowImage(view);
             *      Cv2.WaitKey();
             *  }*/
        }
Example #9
        /// <summary>
        /// Initializes a new instance of the <see cref="PatternDetector"/> class.
        /// </summary>
        /// <param name="detector">Detector.</param>
        /// <param name="extractor">Extractor.</param>
        /// <param name="matcher">Matcher.</param>
        /// <param name="ratioTest">If set to <c>true</c> ratio test.</param>
        public PatternDetector(ORB detector, ORB extractor, DescriptorMatcher matcher, bool ratioTest = false)
        {
            if (detector == null)
            {
                detector = ORB.create();
                detector.setMaxFeatures(1000);
            }
            if (extractor == null)
            {
                extractor = ORB.create();
                extractor.setMaxFeatures(1000);
            }
            if (matcher == null)
            {
                matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
            }

            m_detector  = detector;
            m_extractor = extractor;
            m_matcher   = matcher;

            enableRatioTest                 = ratioTest;
            enableHomographyRefinement      = true;
            homographyReprojectionThreshold = 3;

            m_queryKeypoints    = new MatOfKeyPoint();
            m_queryDescriptors  = new Mat();
            m_matches           = new MatOfDMatch();
            m_knnMatches        = new List <MatOfDMatch>();
            m_grayImg           = new Mat();
            m_warpedImg         = new Mat();
            m_roughHomography   = new Mat();
            m_refinedHomography = new Mat();
        }
Example #10
        /// <summary>
        /// Initializes a new instance of the <see cref="PatternDetector"/> class.
        /// </summary>
        /// <param name="detector">Detector.</param>
        /// <param name="extractor">Extractor.</param>
        /// <param name="matcher">Matcher.</param>
        /// <param name="ratioTest">If set to <c>true</c> ratio test.</param>
        public PatternDetector(ORB detector, ORB extractor, bool ratioTest = false)
        {
            if (detector == null)
            {
                detector             = ORB.Create();
                detector.MaxFeatures = 1000;
            }
            if (extractor == null)
            {
                extractor             = ORB.Create();
                extractor.MaxFeatures = 1000;
            }

            m_detector  = detector;
            m_extractor = extractor;

            enableRatioTest                 = ratioTest;
            enableHomographyRefinement      = true;
            homographyReprojectionThreshold = 3;

            m_queryKeypoints   = new KeyPoint[] { };
            m_queryDescriptors = new Mat();

            m_grayImg           = new Mat();
            m_warpedImg         = new Mat();
            m_roughHomography   = new Mat();
            m_refinedHomography = new Mat();

            warpedKeypoints = new KeyPoint[] { };
        }
Example #11
        public void Mathing()
        {
            using (var img1 = Image("tsukuba_left.png", ImreadModes.GrayScale))
                using (var img2 = Image("tsukuba_right.png", ImreadModes.GrayScale))
                    using (var orb = ORB.Create(500))
                        using (var descriptor1 = new Mat())
                            using (var descriptor2 = new Mat())
                            {
                                KeyPoint[] keyPoints1, keyPoints2;
                                orb.DetectAndCompute(img1, null, out keyPoints1, descriptor1);
                                orb.DetectAndCompute(img2, null, out keyPoints2, descriptor2);

                                // FLANN's default (KDTree) index needs the descriptors to be of type CV_32F
                                Assert.AreEqual(MatType.CV_8UC1, descriptor1.Type());
                                Assert.AreEqual(MatType.CV_8UC1, descriptor2.Type());
                                descriptor1.ConvertTo(descriptor1, MatType.CV_32F);
                                descriptor2.ConvertTo(descriptor2, MatType.CV_32F);

                                var      matcher = new FlannBasedMatcher();
                                DMatch[] matches = matcher.Match(descriptor1, descriptor2);

                                /*
                                 * using (var view = new Mat())
                                 * using (var window = new Window())
                                 * {
                                 *  Cv2.DrawMatches(img1, keyPoints1, img2, keyPoints2, matches, view);
                                 *  window.ShowImage(view);
                                 *  Cv2.WaitKey();
                                 * }*/
                            }
        }
Example #12
        public bool NoValueInScope()
        {
            ORB orb = OrbServices.GetSingleton();

            omg.org.PortableInterceptor.Current current =
                (omg.org.PortableInterceptor.Current)orb.resolve_initial_references("PICurrent");
            return(current.get_slot(m_slotId) == null);
        }
        // Use this for initialization
        void Start()
        {
            Texture2D imgTexture = Resources.Load("lena") as Texture2D;

            Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

            Utils.texture2DToMat(imgTexture, img1Mat);
            Debug.Log("img1Mat.ToString() " + img1Mat.ToString());

            Mat img2Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

            Utils.texture2DToMat(imgTexture, img2Mat);
            Debug.Log("img2Mat.ToString() " + img2Mat.ToString());


            float angle = UnityEngine.Random.Range(0, 360), scale = 1.0f;

            Point center = new Point(img2Mat.cols() * 0.5f, img2Mat.rows() * 0.5f);

            Mat affine_matrix = Imgproc.getRotationMatrix2D(center, angle, scale);

            Imgproc.warpAffine(img1Mat, img2Mat, affine_matrix, img2Mat.size());
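            // img2Mat is now a randomly rotated copy of img1Mat, so the matching below compares the image with its rotated version.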


            ORB detector = ORB.create();

            DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

            MatOfKeyPoint keypoints1   = new MatOfKeyPoint();
            Mat           descriptors1 = new Mat();

            detector.detect(img1Mat, keypoints1);
            extractor.compute(img1Mat, keypoints1, descriptors1);

            MatOfKeyPoint keypoints2   = new MatOfKeyPoint();
            Mat           descriptors2 = new Mat();

            detector.detect(img2Mat, keypoints2);
            extractor.compute(img2Mat, keypoints2, descriptors2);


            DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
            MatOfDMatch       matches = new MatOfDMatch();

            matcher.match(descriptors1, descriptors2, matches);


            Mat resultImg = new Mat();

            Features2d.drawMatches(img1Mat, keypoints1, img2Mat, keypoints2, matches, resultImg);


            Texture2D texture = new Texture2D(resultImg.cols(), resultImg.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(resultImg, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }
        void OnFast()
        {
            Mat image01 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_01.jpg");
            Mat image02 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_02.jpg");

            Mat image1 = new Mat(), image2 = new Mat();

            Cv2.CvtColor(image01, image1, ColorConversionCodes.RGB2GRAY);
            Cv2.CvtColor(image02, image2, ColorConversionCodes.RGB2GRAY);
            KeyPoint[] keyPoint1 = Cv2.FAST(image1, 50, true);
            KeyPoint[] keyPoint2 = Cv2.FAST(image2, 50, true);
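            // FAST provides the keypoints; the ORB instance below is used only to compute descriptors for them.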
            using (Mat descriptor1 = new Mat())
                using (Mat descriptor2 = new Mat())
                    using (var orb = ORB.Create(50))
                        using (var matcher = new BFMatcher())
                        {
                            orb.Compute(image1, ref keyPoint1, descriptor1);
                            orb.Compute(image2, ref keyPoint2, descriptor2);
                            Debug.Log(string.Format("keyPoints has {0},{1} items.", keyPoint1.Length, keyPoint2.Length));
                            Debug.Log(string.Format("descriptor has {0},{1} items.", descriptor1.Rows, descriptor2.Rows));

                            List <DMatch> goodMatchePoints = new List <DMatch>();
                            var           dm = matcher.KnnMatch(descriptor1, descriptor2, 2);
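                            // Note: both filtering blocks below append to the same goodMatchePoints list, so matches that pass both thresholds appear twice.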

                            #region Ratio test with fixed 0.6 threshold (matched 175)
                            for (int i = 0; i < dm.Length; i++)
                            {
                                if (dm[i][0].Distance < 0.6 * dm[i][1].Distance)
                                {
                                    goodMatchePoints.Add(dm[i][0]);
                                }
                            }
                            #endregion

                            #region Ratio test with 1/1.5 threshold (matched 90)
                            float minRatio = 1.0f / 1.5f;
                            for (int i = 0; i < dm.Length; i++)
                            {
                                DMatch bestMatch     = dm[i][0];
                                DMatch betterMatch   = dm[i][1];
                                float  distanceRatio = bestMatch.Distance / betterMatch.Distance;
                                if (distanceRatio < minRatio)
                                {
                                    goodMatchePoints.Add(bestMatch);
                                }
                            }
                            #endregion

                            var dstMat = new Mat();
                            Debug.Log(string.Format("matchePoints has {0} items", goodMatchePoints.Count));
                            Cv2.DrawMatches(image01, keyPoint1, image02, keyPoint2, goodMatchePoints, dstMat);
                            t2d = Utils.MatToTexture2D(dstMat);
                        }

            Sprite dst_sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

            SrcSprite.sprite = dst_sp;
        }
Example #15
        public void Detect()
        {
            // This parameter should produce the same result as http://opencv.jp/wordpress/wp-content/uploads/lenna_SURF-150x150.png
            using var gray = Image("lenna.png", 0);
            using var orb  = ORB.Create(500);
            var keyPoints = orb.Detect(gray);

            Console.WriteLine($"KeyPoint has {keyPoints.Length} items.");
        }
 public SharedAuthSecret DecodeSharedAuth(byte[] encoded)
 {
     try {
         VersionedData[] versions = DecodeExportedVersions(encoded,
                                                           _magicTagSharedAuth);
         SharedAuthSecretImpl sharedAuth = null;
         for (int i = 0; i < versions.Length; i++)
         {
             // If there are two versions, the current version comes before the legacy one.
             if (versions[i].version == ExportVersion.ConstVal)
             {
                 TypeCode exportedAuthTypeCode =
                     ORB.create_tc_for_type(typeof(ExportedSharedAuth));
                 ExportedSharedAuth exportedAuth =
                     (ExportedSharedAuth)
                     _codec.decode_value(versions[i].encoded, exportedAuthTypeCode);
                 if (sharedAuth == null)
                 {
                     sharedAuth = new SharedAuthSecretImpl(exportedAuth.bus, exportedAuth.attempt, exportedAuth.secret, null);
                 }
                 else
                 {
                     sharedAuth.Attempt = exportedAuth.attempt;
                 }
             }
             if (versions[i].version == CurrentVersion.ConstVal)
             {
                 TypeCode exportedAuthTypeCode =
                     ORB.create_tc_for_type(typeof(core.v2_0.data_export.ExportedSharedAuth));
                 core.v2_0.data_export.ExportedSharedAuth exportedAuth =
                     (core.v2_0.data_export.ExportedSharedAuth)
                     _codec.decode_value(versions[i].encoded, exportedAuthTypeCode);
                 if (sharedAuth == null)
                 {
                     sharedAuth = new SharedAuthSecretImpl(exportedAuth.bus, null, exportedAuth.secret, exportedAuth.attempt);
                 }
                 else
                 {
                     sharedAuth.LegacyAttempt = exportedAuth.attempt;
                 }
             }
         }
         if (sharedAuth != null)
         {
             return(sharedAuth);
         }
     }
     catch (GenericUserException e) {
         const string message =
             "Falha inesperada ao decodificar uma autenticação compartilhada exportada.";
         Logger.Error(message, e);
         throw new InvalidEncodedStreamException(message, e);
     }
     throw new InvalidEncodedStreamException("Versão de autenticação compartilhada incompatível.");
 }
Example #17
    /// <summary>
    /// Test method
    /// </summary>
    void Detect()
    {
        var gray = new Mat(Application.streamingAssetsPath + "/Textures/p1.jpg", ImreadModes.GrayScale);

        KeyPoint[] keyPoints = null;
        using (var orb = ORB.Create(500))
        {
            keyPoints = orb.Detect(gray);
            Debug.Log($"KeyPoint has {keyPoints.Length} items.");
        }
    }
        static private void CreateORB(
            Mat imgGray,
            KeyPoint[] keypoints,
            out MatOfFloat descriptors)
        {
            descriptors = new MatOfFloat();
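            // Note: ORB produces binary (CV_8U) descriptors, so despite the MatOfFloat declaration the computed descriptors are not float data.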

            ORB orb1 = ORB.Create();

            orb1.Compute(imgGray, ref keypoints, descriptors);
        }
        void Detect()
        {
            var gray = new Mat(Application.streamingAssetsPath + "/bryce_01.jpg", ImreadModes.GrayScale);

            KeyPoint[] keyPoints = null;
            using (var orb = ORB.Create(500))
            {
                keyPoints = orb.Detect(gray);
                Debug.Log(string.Format("KeyPoint has {0} items.", keyPoints.Length));
            }
        }
Example #20
        public void DetectAndCompute()
        {
            using (var gray = Image("lenna.png", ImreadModes.Grayscale))
                using (var orb = ORB.Create(500))
                    using (Mat descriptor = new Mat())
                    {
                        orb.DetectAndCompute(gray, null, out var keyPoints, descriptor);

                        Console.WriteLine($"keyPoints has {keyPoints.Length} items.");
                        Console.WriteLine($"descriptor has {descriptor.Rows} items.");
                    }
        }
Example #21
        public System.Int32 TestAddToContextData(System.Int32 arg)
        {
            ORB orb = OrbServices.GetSingleton();

            omg.org.PortableInterceptor.Current current =
                (omg.org.PortableInterceptor.Current)orb.resolve_initial_references("PICurrent");
            int contextData = (int)current.get_slot(m_slotId);
            int result      = contextData + arg;

            current.set_slot(m_slotId, result);
            return(result);
        }
        private byte[] EncodeExportedVersions(VersionedData[] exports, byte[] tag)
        {
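            // Layout: the magic tag (MagicTagSize bytes) followed by the CDR encoding of the VersionedData sequence.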
            TypeCode versionedTypeCode = ORB.create_tc_for_type(typeof(VersionedData));
            TypeCode sequenceTypeCode  = ORB.create_sequence_tc(0, versionedTypeCode);
            Any      any = new Any(exports, sequenceTypeCode);

            byte[] encodedVersions = _codec.encode_value(any);
            byte[] fullEnconding   = new byte[encodedVersions.Length + MagicTagSize];
            Buffer.BlockCopy(tag, 0, fullEnconding, 0, MagicTagSize);
            Buffer.BlockCopy(encodedVersions, 0, fullEnconding, MagicTagSize, encodedVersions.Length);
            return(fullEnconding);
        }
Example #23
        static IEnumerable <DMatch> BFMatch(Mat image1, Mat image2)
        {
            Mat dst1 = new Mat();
            Mat dst2 = new Mat();
            var orb  = ORB.Create();

            orb.DetectAndCompute(image1, null, out var kp1, dst1);
            orb.DetectAndCompute(image2, null, out var kp2, dst2);

            BFMatcher matcher = new BFMatcher();

            return(matcher.Match(dst1, dst2));
        }
        internal Current GetPICurrent()
        {
            Current current = ORB.resolve_initial_references("PICurrent") as Current;

            if (current == null)
            {
                const string message =
                    "Falha inesperada ao acessar o slot da thread corrente";
                Logger.Fatal(message);
                throw new OpenBusInternalException(message);
            }
            return(current);
        }
        void DetectAndCompute()
        {
            var gray = new Mat(Application.streamingAssetsPath + "/bryce_01.jpg", ImreadModes.GrayScale);

            KeyPoint[] keyPoints = null;
            using (var orb = ORB.Create(500))
                using (Mat descriptor = new Mat())
                {
                    orb.DetectAndCompute(gray, new Mat(), out keyPoints, descriptor);
                    Debug.Log(string.Format("keyPoints has {0} items.", keyPoints.Length));
                    Debug.Log(string.Format("descriptor has {0} items.", descriptor.Rows));
                }
        }
Example #26
    void DetectAndCompute()
    {
        var gray = new Mat(Application.streamingAssetsPath + "/Textures/p1.jpg", ImreadModes.GrayScale);

        KeyPoint[] keyPoints = null;
        using (var orb = ORB.Create(500))
            using (Mat descriptor = new Mat())
            {
                orb.DetectAndCompute(gray, new Mat(), out keyPoints, descriptor);

                Debug.Log($"keyPoints has {keyPoints.Length} items.");
                Debug.Log($"descriptor has {descriptor.Rows} items.");
            }
    }
Example #27
 public void receive_request(ServerRequestInfo ri)
 {
     // Modify the request scope after it has been copied to the thread scope -> the change must not propagate back to the thread scope.
     if (ri.operation == "TestReceiveReqNotChangeThreadScope")
     {
         ri.set_slot(m_slotId, 2 * (int)ri.get_slot(m_slotId));
     }
     else if (ri.operation == "TestReceiveReqChangeThreadScope")
     {
         ORB orb = OrbServices.GetSingleton();
         omg.org.PortableInterceptor.Current current =
             (omg.org.PortableInterceptor.Current)orb.resolve_initial_references("PICurrent");
         current.set_slot(m_slotId, 3 * (int)current.get_slot(m_slotId));
     }
 }
        public override void RunTest()
        {
            using var img1 = new Mat(ImagePath.Match1, ImreadModes.Color);
            using var img2 = new Mat(ImagePath.Match2, ImreadModes.Color);

            using var orb          = ORB.Create(1000);
            using var descriptors1 = new Mat();
            using var descriptors2 = new Mat();
            orb.DetectAndCompute(img1, null, out var keyPoints1, descriptors1);
            orb.DetectAndCompute(img2, null, out var keyPoints2, descriptors2);

            using var bf = new BFMatcher(NormTypes.Hamming, crossCheck: true);
            var matches = bf.Match(descriptors1, descriptors2);

            var goodMatches = matches
                              .OrderBy(x => x.Distance)
                              .Take(10)
                              .ToArray();

            var srcPts = goodMatches.Select(m => keyPoints1[m.QueryIdx].Pt).Select(p => new Point2d(p.X, p.Y));
            var dstPts = goodMatches.Select(m => keyPoints2[m.TrainIdx].Pt).Select(p => new Point2d(p.X, p.Y));

            using var homography = Cv2.FindHomography(srcPts, dstPts, HomographyMethods.Ransac, 5, null);

            int h = img1.Height, w = img1.Width;
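            // Corner points are taken from img1's size and projected into img2's frame via the homography below.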
            var img2Bounds = new[]
            {
                new Point2d(0, 0),
                new Point2d(0, h - 1),
                new Point2d(w - 1, h - 1),
                new Point2d(w - 1, 0),
            };
            var img2BoundsTransformed = Cv2.PerspectiveTransform(img2Bounds, homography);

            using var view = img2.Clone();
            var drawingPoints = img2BoundsTransformed.Select(p => (Point)p).ToArray();

            Cv2.Polylines(view, new [] { drawingPoints }, true, Scalar.Red, 3);

            using (new Window("view", view))
            {
                Cv2.WaitKey();
            }
        }
        public PatternDetector(bool ratioTest)
        {
            m_detector = ORB.Create(1000);

            m_extractor = ORB.Create(1000);
            //BFMatcher bfMatcher = new BFMatcher(NormTypes.Hamming, true);
            m_matcher = new BFMatcher(NormTypes.Hamming);
            //m_matcher = DescriptorMatcher.Create("BRUTEFORCE_HAMMING");
            enableRatioTest                 = ratioTest;
            enableHomographyRefinement      = true;
            homographyReprojectionThreshold = 3;

            //m_queryKeypoints = new MatOfKeyPoint ();
            m_queryDescriptors = new Mat();
            //m_matches = new MatOfDMatch ();
            //m_knnMatches = new List<MatOfDMatch> ();
            m_grayImg           = new Mat();
            m_warpedImg         = new Mat();
            m_roughHomography   = new Mat();
            m_refinedHomography = new Mat();
        }
Example #30
        public void TestNoSlotSet()
        {
            // receive request modifies the thread scope slots
            try {
                int slotId = m_testInterceptorInit.RequestIntercept.SlotId;
                ORB orb    = OrbServices.GetSingleton();
                omg.org.PortableInterceptor.Current current =
                    (omg.org.PortableInterceptor.Current)orb.resolve_initial_references("PICurrent");
                current.set_slot(slotId, null);

                System.Boolean result = m_testService.NoValueInScope();
                Assertion.Assert("value in slot", result);

                Assertion.Assert("service context present", !m_testInterceptorInit.RequestIntercept.HasReceivedContextElement);

                current =
                    (omg.org.PortableInterceptor.Current)orb.resolve_initial_references("PICurrent");
                Assertion.AssertNull("slot was set", current.get_slot(slotId));
            } finally {
                m_testInterceptorInit.RequestIntercept.ClearInvocationHistory();
            }
        }
Example #31
        public void Run()
        {
            var gray = new Mat(FilePath.Lenna, LoadMode.GrayScale);
            var dst = new Mat(FilePath.Lenna, LoadMode.Color);

            // ORB
            var orb = new ORB(1000);
            KeyPoint[] keypoints = orb.Detect(gray);

            // FREAK
            FREAK freak = new FREAK();
            Mat freakDescriptors = new Mat();
            freak.Compute(gray, ref keypoints, freakDescriptors);

            if (keypoints != null)
            {
                var color = new Scalar(0, 255, 0);
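                // Draw each keypoint as a circle with an X through it, with radius equal to half the keypoint size.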
                foreach (KeyPoint kpt in keypoints)
                {
                    float r = kpt.Size / 2;
                    Cv2.Circle(dst, kpt.Pt, (int)r, color, 1, LineType.Link8, 0);
                    Cv2.Line(dst, 
                        new Point2f(kpt.Pt.X + r, kpt.Pt.Y + r), 
                        new Point2f(kpt.Pt.X - r, kpt.Pt.Y - r), 
                        color, 1, LineType.Link8, 0);
                    Cv2.Line(dst, 
                        new Point2f(kpt.Pt.X - r, kpt.Pt.Y + r), 
                        new Point2f(kpt.Pt.X + r, kpt.Pt.Y - r), 
                        color, 1, LineType.Link8, 0);
                }
            }

            using (new Window("FREAK", dst))
            {
                Cv.WaitKey();
            }
        }