Example #1
        /// <summary>
        /// Cross-checks matches between two descriptor sets: a query->train match
        /// is kept only if the train->query match points back to the same keypoint.
        /// </summary>
        /// <returns>The matches that survive the cross-check.</returns>
        /// <param name="queryDescriptors">Query descriptors.</param>
        /// <param name="trainDescriptors">Train descriptors.</param>
        public static IList <DMatch> CrossMatcher(MatOfFloat queryDescriptors, MatOfFloat trainDescriptors)
        {
            MatOfDMatch   matchQT = new MatOfDMatch(), matchTQ = new MatOfDMatch();
            List <DMatch> bmatch = new List <DMatch>();

            // Validate both inputs before running either match.
            if (queryDescriptors.cols() <= 0)
            {
                throw new ApplicationException("CrossMatcher: queryDescriptors is empty.");
            }
            if (trainDescriptors.cols() <= 0)
            {
                throw new ApplicationException("CrossMatcher: trainDescriptors is empty.");
            }
            matcher.match(queryDescriptors, trainDescriptors, matchQT);
            matcher.match(trainDescriptors, queryDescriptors, matchTQ);

            // Convert once up front; calling toList() inside the loop is O(n^2).
            List <DMatch> listQT = matchQT.toList();
            List <DMatch> listTQ = matchTQ.toList();
            for (int i = 0; i < listQT.Count; i++)
            {
                DMatch forward  = listQT[i];
                DMatch backward = listTQ[forward.trainIdx];
                if (backward.trainIdx == forward.queryIdx)
                {
                    bmatch.Add(forward);
                }
            }
            return(bmatch.ToArray());
        }
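The helper above depends on a class-level `matcher` field that the snippet does not show. A minimal sketch of how it might be declared, assuming the OpenCV for Unity DescriptorMatcher API; only the field name comes from the snippet, the rest is an assumption:

        // Hypothetical declaration of the matcher used by CrossMatcher above.
        static readonly DescriptorMatcher matcher =
            DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);

        // Usage: only matches confirmed in both directions are returned.
        // IList<DMatch> good = Utils.CrossMatcher(queryDescriptors, trainDescriptors);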
Example #2
        private void MatchBySurf(Mat src1, Mat src2)
        {
            var gray1 = new Mat();
            var gray2 = new Mat();

            Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
            Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

            var surf = new SURF(500, 4, 2, true);

            // Detect the keypoints and generate their descriptors using SURF
            KeyPoint[] keypoints1, keypoints2;
            var descriptors1 = new MatOfFloat();
            var descriptors2 = new MatOfFloat();
            surf.Run(gray1, null, out keypoints1, descriptors1);
            surf.Run(gray2, null, out keypoints2, descriptors2);

            // Match descriptor vectors 
            var bfMatcher = new BFMatcher(NormType.L2, false);
            var flannMatcher = new FlannBasedMatcher();
            DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
            DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

            // Draw matches
            var bfView = new Mat();
            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
            var flannView = new Mat();
            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);

            using (new Window("SURF matching (by BFMather)", WindowMode.AutoSize, bfView))
            using (new Window("SURF matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
            {
                Cv2.WaitKey();
            }
        }
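The raw nearest-neighbour matches above can be noisy; a common refinement is Lowe's ratio test. A minimal sketch continuing from the same bfMatcher and descriptors, assuming System.Linq is available; the 0.75 threshold is a conventional choice, not part of the original sample:

            // Keep a match only if it is clearly better than the second-best candidate.
            DMatch[][] knn = bfMatcher.KnnMatch(descriptors1, descriptors2, k: 2);
            DMatch[] goodMatches = knn
                .Where(m => m.Length == 2 && m[0].Distance < 0.75f * m[1].Distance)
                .Select(m => m[0])
                .ToArray();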
Example #3
        /// <summary>
        /// Gets a copy of the descriptor at the specified index.
        /// </summary>
        /// <param name="index">Index.</param>
        public MatOfFloat Descriptor(int index)
        {
            MatOfFloat m = new MatOfFloat();

            _descriptorL[index].copyTo(m);
            return(m);
        }
Example #4
 public static int ORBMatcher(Mat queryMat, Mat trainMat, MatOfKeyPoint queryKeypoints, MatOfKeyPoint trainKeypoints, out IList <DMatch> matches)
 {
     using (MatOfFloat queryDescriptors = new MatOfFloat())
         using (MatOfFloat trainDescriptors = new MatOfFloat())
         {
             queryMat.ORBPointFeature(queryKeypoints, queryDescriptors);
             trainMat.ORBPointFeature(trainKeypoints, trainDescriptors);
             if (queryDescriptors.type() == CvType.CV_8U && trainDescriptors.type() == CvType.CV_8U)
             {
                 matches = Utils.CrossMatcher(queryDescriptors, trainDescriptors);
                 if (matches.Count > 0)
                 {
                     return(0);
                 }
                 else
                 {
                     return(-1);
                 }
             }
             else
             {
                 matches = null;
                 return(-1);
             }
         }
 }
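The CV_8U check above reflects that ORB produces binary descriptors, for which Hamming distance is the usual metric. A hedged sketch of a Hamming-based matcher, assuming OpenCV for Unity's DescriptorMatcher API (not part of the original snippet):

     // Brute-force Hamming matching for binary ORB descriptors.
     DescriptorMatcher hamming = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
     MatOfDMatch dmatches = new MatOfDMatch();
     hamming.match(queryDescriptors, trainDescriptors, dmatches);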
Example #5
    private Scalar colorRed = new Scalar(255, 0, 0, 125); // Red color

    void Start()
    {
        for (int i = 0; i < WebCamTexture.devices.Length; i++)
        {
            Debug.Log(WebCamTexture.devices[i].name);
        }

        mCamera       = new WebCamTexture();
        matOpFlowThis = new Mat();
        matOpFlowPrev = new Mat();
        MOPcorners    = new MatOfPoint();
        mMOP2fptsThis = new MatOfPoint2f();
        mMOP2fptsPrev = new MatOfPoint2f();
        mMOP2fptsSafe = new MatOfPoint2f();
        mMOBStatus    = new MatOfByte();
        mMOFerr       = new MatOfFloat();

        mCamera.Play();

        rgbaMat = new Mat(mCamera.height, mCamera.width, CvType.CV_8UC4);
        texture = new Texture2D(mCamera.width, mCamera.height, TextureFormat.RGBA32, false);
        colors  = new Color32[mCamera.width * mCamera.height];

        GetComponent <Renderer>().material.mainTexture = texture;
    }
Example #6
        private void MatchBySurf(Mat src1, Mat src2)
        {
            Mat gray1 = new Mat();
            Mat gray2 = new Mat();

            Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
            Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

            SURF surf = new SURF(500, 4, 2, true);

            // Detect the keypoints and generate their descriptors using SURF
            KeyPoint[] keypoints1, keypoints2;
            MatOfFloat descriptors1 = new MatOfFloat();
            MatOfFloat descriptors2 = new MatOfFloat();

            surf.Run(gray1, null, out keypoints1, descriptors1);
            surf.Run(gray2, null, out keypoints2, descriptors2);

            // Matching descriptor vectors with a brute force matcher
            BFMatcher matcher = new BFMatcher(NormType.L2, false);

            DMatch[] matches = matcher.Match(descriptors1, descriptors2);

            // Draw matches
            Mat view = new Mat();

            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, matches, view);

            using (new Window("SURF matching", WindowMode.AutoSize, view))
            {
                Cv2.WaitKey();
            }
        }
Example #7
        public Quaternion RotationEstimation(IList <Vector3> KeyVector, MatOfFloat Descriptors, out float MinError)
        {
            if (!initFlag)
            {
                throw new OutLookARException("初期化されていません");
            }
            if (KeyVector.Count != Descriptors.rows() || Descriptors.rows() <= 0)
            {
                throw new OutLookARException("KeyVectorとDescriptorsの数が合いません");
            }
            IList <DMatch> matches;

            matches = Utils.CrossMatcher(MapDescriptors, Descriptors).LowPassFilter();
            if (matches.Count <= 0)
            {
                throw new OutLookARException("マッチしませんでした");
            }
            List <Vector3> FromPointL = new List <Vector3>();
            List <Vector3> ToPointL   = new List <Vector3>();

            foreach (DMatch match in matches)
            {
                FromPointL.Add(MapKeyVector[match.queryIdx]);
                ToPointL.Add(KeyVector[match.trainIdx]);
            }
            return(LMedS(FromPointL, ToPointL, out MinError));
        }
Example #8
        //javadoc: NMSBoxesRotated(bboxes, scores, score_threshold, nms_threshold, indices)
        public static void NMSBoxesRotated(MatOfRotatedRect bboxes, MatOfFloat scores, float score_threshold, float nms_threshold, MatOfInt indices)
        {
            if (bboxes != null)
            {
                bboxes.ThrowIfDisposed();
            }
            if (scores != null)
            {
                scores.ThrowIfDisposed();
            }
            if (indices != null)
            {
                indices.ThrowIfDisposed();
            }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
            Mat bboxes_mat  = bboxes;
            Mat scores_mat  = scores;
            Mat indices_mat = indices;
            dnn_Dnn_NMSBoxesRotated_12(bboxes_mat.nativeObj, scores_mat.nativeObj, score_threshold, nms_threshold, indices_mat.nativeObj);

            return;
#else
            return;
#endif
        }
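A minimal usage sketch with made-up boxes and scores; the 0.5 score threshold and 0.3 NMS (IoU) threshold are illustrative values, and the constructors are assumed from the OpenCV for Unity API:

            MatOfRotatedRect boxes = new MatOfRotatedRect(
                new RotatedRect(new Point(50, 50), new Size(40, 20), 0),
                new RotatedRect(new Point(52, 51), new Size(40, 20), 2));
            MatOfFloat scores = new MatOfFloat(0.9f, 0.6f);
            MatOfInt keep = new MatOfInt();
            Dnn.NMSBoxesRotated(boxes, scores, 0.5f, 0.3f, keep);
            // keep now holds the indices of the boxes that survive suppression.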
Example #9
        private void MatchBySift(Mat src1, Mat src2)
        {
            var gray1 = new Mat();
            var gray2 = new Mat();

            Cv2.CvtColor(src1, gray1, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(src2, gray2, ColorConversionCodes.BGR2GRAY);

            var sift = SIFT.Create();

            // Detect the keypoints and generate their descriptors using SIFT
            KeyPoint[] keypoints1, keypoints2;
            var descriptors1 = new MatOfFloat();
            var descriptors2 = new MatOfFloat();
            sift.DetectAndCompute(gray1, null, out keypoints1, descriptors1);
            sift.DetectAndCompute(gray2, null, out keypoints2, descriptors2);

            // Match descriptor vectors
            var bfMatcher = new BFMatcher(NormTypes.L2, false);
            var flannMatcher = new FlannBasedMatcher();
            DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
            DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

            // Draw matches
            var bfView = new Mat();
            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
            var flannView = new Mat();
            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);

            using (new Window("SIFT matching (by BFMather)", WindowMode.AutoSize, bfView))
            using (new Window("SIFT matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
            {
                Cv2.WaitKey();
            }
        }
Example #10
        private void MatchBySurf(Mat src1, Mat src2)
        {
            Mat gray1 = new Mat();
            Mat gray2 = new Mat();

            Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
            Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

            SURF surf = new SURF(500, 4, 2, true);

            // Detect the keypoints and generate their descriptors using SURF
            KeyPoint[] keypoints1, keypoints2;
            MatOfFloat descriptors1 = new MatOfFloat();
            MatOfFloat descriptors2 = new MatOfFloat();
            surf.Run(gray1, null, out keypoints1, descriptors1);
            surf.Run(gray2, null, out keypoints2, descriptors2);

            // Matching descriptor vectors with a brute force matcher
            BFMatcher matcher = new BFMatcher(NormType.L2, false);
            DMatch[] matches = matcher.Match(descriptors1, descriptors2);

            // Draw matches
            Mat view = new Mat();
            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, matches, view);

            using (new Window("SURF matching", WindowMode.AutoSize, view))
            {
                Cv2.WaitKey();
            }
        }
Example #11
        public void OnCameraViewStarted(int width, int height)
        {
            mIntermediateMat = new Mat();
            mSize0           = new Size();
            mChannels        = new MatOfInt[] { new MatOfInt(0), new MatOfInt(1), new MatOfInt(2) };
            mBuff            = new float[mHistSizeNum];
            mHistSize        = new MatOfInt(mHistSizeNum);
            mRanges          = new MatOfFloat(0f, 256f);
            mMat0            = new Mat();
            mColorsRGB       = new Scalar[] { new Scalar(200, 0, 0, 255), new Scalar(0, 200, 0, 255), new Scalar(0, 0, 200, 255) };
            mColorsHue       = new Scalar[] {
                new Scalar(255, 0, 0, 255), new Scalar(255, 60, 0, 255), new Scalar(255, 120, 0, 255), new Scalar(255, 180, 0, 255), new Scalar(255, 240, 0, 255),
                new Scalar(215, 213, 0, 255), new Scalar(150, 255, 0, 255), new Scalar(85, 255, 0, 255), new Scalar(20, 255, 0, 255), new Scalar(0, 255, 30, 255),
                new Scalar(0, 255, 85, 255), new Scalar(0, 255, 150, 255), new Scalar(0, 255, 215, 255), new Scalar(0, 234, 255, 255), new Scalar(0, 170, 255, 255),
                new Scalar(0, 120, 255, 255), new Scalar(0, 60, 255, 255), new Scalar(0, 0, 255, 255), new Scalar(64, 0, 255, 255), new Scalar(120, 0, 255, 255),
                new Scalar(180, 0, 255, 255), new Scalar(255, 0, 255, 255), new Scalar(255, 0, 215, 255), new Scalar(255, 0, 85, 255), new Scalar(255, 0, 0, 255)
            };
            mWhilte = Scalar.All(255);
            mP1     = new Point();
            mP2     = new Point();

            // Fill sepia kernel
            mSepiaKernel = new Mat(4, 4, CvType.CV_32F);
            mSepiaKernel.Put(0, 0, /* R */ 0.189f, 0.769f, 0.393f, 0f);
            mSepiaKernel.Put(1, 0, /* G */ 0.168f, 0.686f, 0.349f, 0f);
            mSepiaKernel.Put(2, 0, /* B */ 0.131f, 0.534f, 0.272f, 0f);
            mSepiaKernel.Put(3, 0, /* A */ 0.000f, 0.000f, 0.000f, 1f);
        }
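These fields prepare per-channel histograms. A minimal sketch of how they might be used on a frame, assuming OpenCV for Unity's Imgproc.calcHist; frameMat is a hypothetical current camera image:

            // Histogram of channel 0 over the [0, 256) range in mRanges, into mHistSizeNum bins.
            Mat hist = new Mat();
            Imgproc.calcHist(new List <Mat> { frameMat }, mChannels[0], mMat0, hist, mHistSize, mRanges);
            hist.get(0, 0, mBuff); // copy the bin counts into the preallocated buffer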
Example #12
        //
        // C++:  void cv::text::TextDetector::detect(Mat inputImage, vector_Rect& Bbox, vector_float& confidence)
        //

        //javadoc: TextDetector::detect(inputImage, Bbox, confidence)
        public virtual void detect(Mat inputImage, MatOfRect Bbox, MatOfFloat confidence)
        {
            ThrowIfDisposed();
            if (inputImage != null)
            {
                inputImage.ThrowIfDisposed();
            }
            if (Bbox != null)
            {
                Bbox.ThrowIfDisposed();
            }
            if (confidence != null)
            {
                confidence.ThrowIfDisposed();
            }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
            Mat Bbox_mat       = Bbox;
            Mat confidence_mat = confidence;
            text_TextDetector_detect_10(nativeObj, inputImage.nativeObj, Bbox_mat.nativeObj, confidence_mat.nativeObj);

            return;
#else
            return;
#endif
        }
Example #13
        /**
         * Given the {@code input} frame, create input blob, run net and return result detections.
         * param classIds Class indexes in result detection.
         * param confidences A set of corresponding confidences.
         * param boxes A set of bounding boxes.
         * param frame automatically generated
         */
        public void detect(Mat frame, MatOfInt classIds, MatOfFloat confidences, MatOfRect boxes)
        {
            ThrowIfDisposed();
            if (frame != null)
            {
                frame.ThrowIfDisposed();
            }
            if (classIds != null)
            {
                classIds.ThrowIfDisposed();
            }
            if (confidences != null)
            {
                confidences.ThrowIfDisposed();
            }
            if (boxes != null)
            {
                boxes.ThrowIfDisposed();
            }
            Mat classIds_mat    = classIds;
            Mat confidences_mat = confidences;
            Mat boxes_mat       = boxes;

            dnn_DetectionModel_detect_12(nativeObj, frame.nativeObj, classIds_mat.nativeObj, confidences_mat.nativeObj, boxes_mat.nativeObj);
        }
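A minimal usage sketch; the weight/config file names are placeholders and frame is any input image Mat:

            DetectionModel model = new DetectionModel("model.weights", "model.cfg"); // hypothetical files
            MatOfInt classIds = new MatOfInt();
            MatOfFloat confidences = new MatOfFloat();
            MatOfRect boxes = new MatOfRect();
            model.detect(frame, classIds, confidences, boxes);
            // Row i of each output describes one detection: class id, confidence, box.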
Example #14
        public static IDictionary <string, MatOfFloat> CreateHashes(IEnumerable <string> paths, int thumbSize)
        {
            var hashesDict = new ConcurrentDictionary <string, MatOfFloat>();
            var tasks      = new List <Task>();

            foreach (var path in paths)
            {
                var task = new Task(() =>
                {
                    var sourceMat = new Mat(path);

                    var scale = (double)thumbSize / Max(sourceMat.Width, sourceMat.Height);
                    sourceMat = sourceMat.Resize(new Size(0, 0), scale, scale, InterpolationFlags.Nearest);
                    var gray  = new Mat();


                    Cv2.CvtColor(sourceMat, gray, ColorConversionCodes.BGR2GRAY);

                    var sift = SIFT.Create();

                    var descriptors = new MatOfFloat();

                    Console.WriteLine("Creating hash for " + path);
                    //var keypoints = sift.Detect(gray).Take(KEYPOINTS_NUMBER).ToArray();
                    //sift.Compute(gray, ref keypoints, descriptors);
                    sift.DetectAndCompute(gray, null, out KeyPoint[] keypoints, descriptors);
                    hashesDict.TryAdd(path, descriptors);
                });
                tasks.Add(task);

                task.Start();
            }
            Task.WaitAll(tasks.ToArray());
            return(hashesDict);
        }
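Once the dictionary is built, two images' descriptor sets can be compared by matching them and scoring the match distances. A hedged sketch assuming OpenCvSharp and System.Linq; the dictionary keys are hypothetical:

            var matcher = new BFMatcher(NormTypes.L2);
            DMatch[] matches = matcher.Match(hashes["a.jpg"], hashes["b.jpg"]);
            double score = matches.Average(m => m.Distance); // lower means more similar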
Example #15
        public IEnumerator InitEKF(Mat src)
        {
            IList <Vector3> KeyVector;
            MatOfFloat      Descriptors = new MatOfFloat();

            PointFeature(src, out KeyVector, Descriptors);
            if (KeyVector.Count == Descriptors.rows() && Descriptors.rows() > 0)
            {
                MapKeyVector.AddRange(KeyVector);
                MapDescriptors = Descriptors;
                for (int i = 0; i < KeyVector.Count; i++)
                {
                    MapError.Add(0);
                }
                mEKFStartCoroutine = ekf.AddEnumerator(Quaternion.identity, 0, KeyVector, Descriptors);
                while (mStartCoroutine != null)
                {
                    bool aStatus = mEKFStartCoroutine.MoveNext();
                    if (!aStatus)
                    {
                        Debug.Log("StartCoroutine done.");
                        mStartCoroutine = null;
                    }
                    yield return(null);
                }
                initFlag = true;
            }
        }
Example #16
        public static Matrix4 ToKinectMatrix4(this MatOfFloat pose)
        {
            var m1 = new float[4, 4];

            pose.GetArray(0, 0, m1);
            var matrix = new Matrix4();

            matrix.M11 = m1[0, 0];
            matrix.M12 = m1[0, 1];
            matrix.M13 = m1[0, 2];
            matrix.M14 = m1[0, 3];

            matrix.M21 = m1[1, 0];
            matrix.M22 = m1[1, 1];
            matrix.M23 = m1[1, 2];
            matrix.M24 = m1[1, 3];

            matrix.M31 = m1[2, 0];
            matrix.M32 = m1[2, 1];
            matrix.M33 = m1[2, 2];
            matrix.M34 = m1[2, 3];

            matrix.M41 = m1[3, 0];
            matrix.M42 = m1[3, 1];
            matrix.M43 = m1[3, 2];
            matrix.M44 = m1[3, 3];

            return(matrix);
        }
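The extension assumes pose is a 4x4 CV_32F matrix, since GetArray fills a fixed float[4, 4]. A minimal round-trip sketch (assuming OpenCvSharp):

        var pose = new MatOfFloat(4, 4);
        Cv2.SetIdentity(pose);
        Matrix4 m = pose.ToKinectMatrix4(); // yields the identity Kinect matrix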
Example #17
 void UpdateMat(Mat mat)
 {
     if (State == TrackerType.StandBy)
     {
         _state = TrackerType.Enable;
     }
     if (State == TrackerType.Enable)
     {
         try
         {
             IList <Vector3> KeyVectors;
             MatOfFloat      Descriptors = new MatOfFloat();
             Utils.ORBPointFeature(mat, out KeyVectors, Descriptors);
             if (MapDescriptors == null || MapKeyVectors == null)
             {
                 _state = TrackerType.StandBy;
                 throw new OutLookARException("MapDescriptors & MapKeyVectors :初期化されていません。");
             }
             IList <DMatch> matches;
             Macher(KeyVectors, Descriptors, out matches);
             List <Vector3> FromPointL = new List <Vector3>();
             List <Vector3> ToPointL   = new List <Vector3>();
             foreach (DMatch match in matches)
             {
                 FromPointL.Add(MapKeyVectors[match.queryIdx]);
                 ToPointL.Add(KeyVectors[match.trainIdx]);
             }
         }
         catch (OutLookARException e)
         {
             Debug.Log(e);
         }
     }
 }
Example #18
        //
        // C++: static Ptr_BRISK cv::BRISK::create(int thresh, int octaves, vector_float radiusList, vector_int numberList, float dMax = 5.85f, float dMin = 8.2f, vector_int indexChange = std::vector<int>())
        //

        //javadoc: BRISK::create(thresh, octaves, radiusList, numberList, dMax, dMin, indexChange)
        public static BRISK create(int thresh, int octaves, MatOfFloat radiusList, MatOfInt numberList, float dMax, float dMin, MatOfInt indexChange)
        {
            if (radiusList != null)
            {
                radiusList.ThrowIfDisposed();
            }
            if (numberList != null)
            {
                numberList.ThrowIfDisposed();
            }
            if (indexChange != null)
            {
                indexChange.ThrowIfDisposed();
            }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
            Mat   radiusList_mat  = radiusList;
            Mat   numberList_mat  = numberList;
            Mat   indexChange_mat = indexChange;
            BRISK retVal          = BRISK.__fromPtr__(features2d_BRISK_create_10(thresh, octaves, radiusList_mat.nativeObj, numberList_mat.nativeObj, dMax, dMin, indexChange_mat.nativeObj));

            return(retVal);
#else
            return(null);
#endif
        }
Example #19
        public Quaternion RotationEstimation(Mat src, out float MinError)
        {
            IList <Vector3> KeyVector;
            MatOfFloat      Descriptors = new MatOfFloat();

            PointFeature(src, out KeyVector, Descriptors);
            return(RotationEstimation(KeyVector, Descriptors, out MinError));
        }
Example #20
        public void SetMap(Mat src)
        {
            IList <Vector3> KeyVector;
            MatOfFloat      Descriptors = new MatOfFloat();

            PointFeature(src, out KeyVector, Descriptors);
            SetMap(KeyVector, Descriptors);
        }
Example #21
 public void GetData()
 {
     var imPath = "D:\\PATHTOIMAGE\\Original.jpg";
     var image = Cv2.ImRead(imPath);
     var sift = new SIFT(100);
     KeyPoint[] keypoints;
     MatOfFloat descriptors = new MatOfFloat();
     sift.Run(image, null, out keypoints, descriptors);
 }
Example #22
 static public void FastDescriptor(
     Mat img,
     out MatOfFloat descriptors,
     out KeyPoint[] keypoints,
     out Mat imgGray)
 {
     FastMatchSample.Fast(img, FastMatchThreshold, out imgGray, out keypoints);
     FastMatchSample.CreateORB(imgGray, keypoints, out descriptors);
 }
Example #23
        IEnumerator Init(IList <Vector3> baseKeyVectors, MatOfFloat baseDescriptors, Quaternion baseRotation)
        {
            _ekfModel          = new EKF(ekfError);
            _trackerModel      = new Tracker(confidence, outlier);
            EKFModel.OnUpdate += UpdateEKF;
            yield return(StartCoroutine(EKFModel.AddEnumerator(baseRotation, 0.01f, baseKeyVectors, baseDescriptors)));

            _state = TrackerType.StandBy;
        }
Example #24
        static public void Match(
            MatOfFloat descriptors1,
            MatOfFloat descriptors2,
            int knnLevel,
            out DMatch[][] matches)
        {
            var bfMatcher = new BFMatcher(NormTypes.L2SQR, false);

            matches = bfMatcher.KnnMatch(descriptors1, descriptors2, knnLevel);
        }
Example #25
        public void MatOfFloatFromArray()
        {
            var array = new float[] { 7, 8, 9 };
            var m     = MatOfFloat.FromArray(array);

            for (int i = 0; i < array.Length; i++)
            {
                Assert.That(m.Get <float>(i), Is.EqualTo(array[i]).Within(1e-6));
            }
        }
Example #26
        public void GetData()
        {
            var imPath = "D:\\PATHTOIMAGE\\Original.jpg";
            var image  = Cv2.ImRead(imPath);
            var sift   = new SIFT(100);

            KeyPoint[] keypoints;
            MatOfFloat descriptors = new MatOfFloat();

            sift.Run(image, null, out keypoints, descriptors);
        }
Example #27
        static private void CreateORB(
            Mat imgGray,
            KeyPoint[] keypoints,
            out MatOfFloat descriptors)
        {
            descriptors = new MatOfFloat();

            ORB orb1 = ORB.Create();

            orb1.Compute(imgGray, ref keypoints, descriptors);
        }
Example #28
        //
        // C++:  void cv::xfeatures2d::PCTSignatures::setWeights(vector_float weights)
        //

        /**
         * Weights (multiplicative constants) that linearly stretch individual axes of the feature space.
         * param weights Values of all weights.
         * <b>Note:</b>
         * WEIGHT_IDX = 0;
         * X_IDX = 1;
         * Y_IDX = 2;
         * L_IDX = 3;
         * A_IDX = 4;
         * B_IDX = 5;
         * CONTRAST_IDX = 6;
         * ENTROPY_IDX = 7;
         */
        public void setWeights(MatOfFloat weights)
        {
            ThrowIfDisposed();
            if (weights != null)
            {
                weights.ThrowIfDisposed();
            }
            Mat weights_mat = weights;

            xfeatures2d_PCTSignatures_setWeights_10(nativeObj, weights_mat.nativeObj);
        }
Example #29
    IEnumerator Init()
    {
        yield return(new WaitForSeconds(1));

        IsStarted          = true;
        webCamTextureToMat = FindObjectOfType <WebCamTextureToMat>() as WebCamTextureToMat;
        Debug.Log("MotionDetectInited");


        Mat webCamTextureMat = webCamTextureToMat.GetMat();

        Debug.Log("webCamTextureMat -- c : " + webCamTextureMat.cols() + " r : " + webCamTextureMat.rows());
        //Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

        colors  = new Color32[webCamTextureMat.cols() * webCamTextureMat.rows()];
        texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);

        matOpFlowThis = new Mat();
        matOpFlowPrev = new Mat();
        MOPcorners    = new MatOfPoint();
        mMOP2fptsThis = new MatOfPoint2f();
        mMOP2fptsPrev = new MatOfPoint2f();
        mMOP2fptsSafe = new MatOfPoint2f();
        mMOBStatus    = new MatOfByte();
        mMOFerr       = new MatOfFloat();


        gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);

        Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

        float width  = 0;
        float height = 0;

        width  = gameObject.transform.localScale.x;
        height = gameObject.transform.localScale.y;

        float widthScale  = (float)Screen.width / width;
        float heightScale = (float)Screen.height / height;

        if (widthScale < heightScale)
        {
            Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        }
        else
        {
            Camera.main.orthographicSize = height / 2;
        }

        gameObject.GetComponent <Renderer>().material.mainTexture = texture;

        //          webCamTextureToMatHelper.Play ();
        yield return(null);
    }
Example #30
        //
        // C++:  void cv::xfeatures2d::PCTSignatures::setTranslations(vector_float translations)
        //

        /**
         * Translations of the individual axes of the feature space.
         * param translations Values of all translations.
         * <b>Note:</b>
         * WEIGHT_IDX = 0;
         * X_IDX = 1;
         * Y_IDX = 2;
         * L_IDX = 3;
         * A_IDX = 4;
         * B_IDX = 5;
         * CONTRAST_IDX = 6;
         * ENTROPY_IDX = 7;
         */
        public void setTranslations(MatOfFloat translations)
        {
            ThrowIfDisposed();
            if (translations != null)
            {
                translations.ThrowIfDisposed();
            }
            Mat translations_mat = translations;

            xfeatures2d_PCTSignatures_setTranslations_10(nativeObj, translations_mat.nativeObj);
        }
Example #31
        //
        // C++:  vector_float cv::saliency::ObjectnessBING::getobjectnessValues()
        //

        //javadoc: ObjectnessBING::getobjectnessValues()
        public MatOfFloat getobjectnessValues()
        {
            ThrowIfDisposed();
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
            MatOfFloat retVal = MatOfFloat.fromNativeAddr(saliency_ObjectnessBING_getobjectnessValues_10(nativeObj));

            return(retVal);
#else
            return(null);
#endif
        }
Example #32
        private void Start()
        {
            var src    = Util.LoadTexture("imori_256x256");
            var dst    = new Mat();
            var kernel = new MatOfFloat(
                1f / 3f, 0f, 0f,
                0f, 1f / 3f, 0f,
                0f, 0f, 1f / 3f);

            Imgproc.filter2D(src, dst, -1, kernel);
            GetComponent <Renderer>().material.mainTexture = Util.MatToTexture2D(dst);
        }
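One caveat worth noting: in OpenCV for Unity, new MatOfFloat(params float[]) produces an Nx1 column vector, so the kernel above is 9x1 rather than 3x3. If a 3x3 kernel is intended, a reshape is one way to get it; this is a sketch, not a claim about the original sample:

            var kernel3x3 = new MatOfFloat(
                1f / 3f, 0f, 0f,
                0f, 1f / 3f, 0f,
                0f, 0f, 1f / 3f).reshape(1, 3); // 9x1 -> 3x3, single channel
            Imgproc.filter2D(src, dst, -1, kernel3x3);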
Example #33
 public EKF(Vector3 error)
 {
     RotationError = error;
     // init
     _varianceCovarianceMatrix = Mat.zeros(4, 4, Type);
     _stateMatrix = new Mat(4, 1, Type);
     _stateMatrix.put(0, 0, 0);
     _stateMatrix.put(1, 0, 0);
     _stateMatrix.put(2, 0, 0);
     _stateMatrix.put(3, 0, 1);
     _stateDescriptors = new MatOfFloat(0, 0, Type);
 }
Example #34
        static public void SiftDescriptor(
            Mat img,
            out MatOfFloat descriptors,
            out KeyPoint[] keypoints,
            out Mat imgGray)
        {
            imgGray = new Mat();
            Cv2.CvtColor(img, imgGray, ColorConversionCodes.BGR2GRAY);

            descriptors = new MatOfFloat();
            instance.Sift.DetectAndCompute(imgGray, null, out keypoints, descriptors);
        }
Example #35
        private static void Surf(IplImage img1, IplImage img2)
        {
            Mat src = new Mat(img1, true);
            Mat src2 = new Mat(img2, true);
            //Detect the keypoints and generate their descriptors using SURF
            SURF surf = new SURF(500, 4, 2, true);
            KeyPoint[] keypoints1, keypoints2;
            MatOfFloat descriptors1 = new MatOfFloat();
            MatOfFloat descriptors2 = new MatOfFloat();
            surf.Run(src, null, out keypoints1, descriptors1);
            surf.Run(src2, null, out keypoints2, descriptors2);
            // Matching descriptor vectors with a brute force matcher
            BFMatcher matcher = new BFMatcher(NormType.L2, false);
            DMatch[] matches = matcher.Match(descriptors1, descriptors2); // an exception can occur here
            Mat view = new Mat();
            Cv2.DrawMatches(src, keypoints1, src2, keypoints2, matches, view);

            Window.ShowImages(view);
        }
Example #36
 /// <summary>
 /// Compute the BRISK features and descriptors on an image
 /// </summary>
 /// <param name="image"></param>
 /// <param name="mask"></param>
 /// <param name="keyPoints"></param>
 /// <param name="descriptors"></param>
 /// <param name="useProvidedKeypoints"></param>
 public void Run(InputArray image, InputArray mask, out KeyPoint[] keyPoints,
     out float[] descriptors, bool useProvidedKeypoints = false)
 {
     MatOfFloat descriptorsMat = new MatOfFloat();
     Run(image, mask, out keyPoints, descriptorsMat, useProvidedKeypoints);
     descriptors = descriptorsMat.ToArray();
 }
Example #37
 /// <summary>
 /// Fits line to the set of 3D points using M-estimator algorithm.
 /// The input is vector of 3D points.
 /// </summary>
 /// <param name="distType">Distance used by the M-estimator</param>
 /// <param name="param">Numerical parameter ( C ) for some types of distances. 
 /// If it is 0, an optimal value is chosen.</param>
 /// <param name="reps">Sufficient accuracy for the radius 
 /// (distance between the coordinate origin and the line).</param>
 /// <param name="aeps">Sufficient accuracy for the angle. 
 /// 0.01 would be a good default value for reps and aeps.</param>
 /// <returns>Output line parameters.</returns>
 public CvLine3D FitLine3D(DistanceType distType, double param, double reps, double aeps)
 {
     var line = new MatOfFloat();
     Cv2.FitLine(this, line, distType, param, reps, aeps);
     return new CvLine3D(line.ToArray());
 }
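A minimal usage sketch, assuming the extension lives on a 3D point mat such as MatOfPoint3f and that FromArray exists for it (as it does for MatOfFloat in Example #25); the points are made up:

     var points = MatOfPoint3f.FromArray(
         new Point3f(0, 0, 0), new Point3f(1, 1, 1), new Point3f(2, 2, 2.1f));
     CvLine3D line = points.FitLine3D(DistanceType.L2, 0, 0.01, 0.01);
     // line packs a unit direction vector and a point on the fitted line.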
Example #38
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();

                                matOpFlowThis.Dispose ();
                                matOpFlowPrev.Dispose ();
                                MOPcorners.Dispose ();
                                mMOP2fptsThis.Dispose ();
                                mMOP2fptsPrev.Dispose ();
                                mMOP2fptsSafe.Dispose ();
                                mMOBStatus.Dispose ();
                                mMOFerr.Dispose ();
                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }

                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();
                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #endif

                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                                        matOpFlowThis = new Mat ();
                                        matOpFlowPrev = new Mat ();
                                        MOPcorners = new MatOfPoint ();
                                        mMOP2fptsThis = new MatOfPoint2f ();
                                        mMOP2fptsPrev = new MatOfPoint2f ();
                                        mMOP2fptsSafe = new MatOfPoint2f ();
                                        mMOBStatus = new MatOfByte ();
                                        mMOFerr = new MatOfFloat ();

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
                                        #endif
            //										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

                                        gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            //										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            //										float scaleX = 1;
            //										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            //										if (webCamTexture.videoRotationAngle == 270)
            //												scaleY = -1.0f;
            //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                                        Camera.main.orthographicSize = webCamTexture.width / 2;
                                        #else
                                        Camera.main.orthographicSize = webCamTexture.height / 2;
                                        #endif

                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }
                }

                // Update is called once per frame
                void Update ()
                {
                        if (!initDone)
                                return;

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                        #else
                        if (webCamTexture.didUpdateThisFrame) {
                                #endif

                                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                                if (webCamTexture.videoVerticallyMirrored) {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 90) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 90) {

                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                } else {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 90) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 90) {

                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                }

                                if (mMOP2fptsPrev.rows () == 0) {

                                        // first time through the loop so we need prev and this mats
                                        // plus prev points
                                        // get this mat
                                        Imgproc.cvtColor (rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                                        // copy that to prev mat
                                        matOpFlowThis.copyTo (matOpFlowPrev);

                                        // get prev corners
                                        Imgproc.goodFeaturesToTrack (matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                                        mMOP2fptsPrev.fromArray (MOPcorners.toArray ());

                                        // get safe copy of this corners
                                        mMOP2fptsPrev.copyTo (mMOP2fptsSafe);
                                } else {
                                        // we've been through before so
                                        // this mat is valid. Copy it to prev mat
                                        matOpFlowThis.copyTo (matOpFlowPrev);

                                        // get this mat
                                        Imgproc.cvtColor (rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                                        // get the corners for this mat
                                        Imgproc.goodFeaturesToTrack (matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                                        mMOP2fptsThis.fromArray (MOPcorners.toArray ());

                                        // retrieve the corners from the prev mat
                                        // (saves calculating them again)
                                        mMOP2fptsSafe.copyTo (mMOP2fptsPrev);

                                        // and save this corners for next time through

                                        mMOP2fptsThis.copyTo (mMOP2fptsSafe);
                                }

                                /*
            Parameters:
            prevImg first 8-bit input image
            nextImg second input image
            prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
            nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
            status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
            err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
            */
                                Video.calcOpticalFlowPyrLK (matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                                if (!mMOBStatus.empty ()) {
                                        List<Point> cornersPrev = mMOP2fptsPrev.toList ();
                                        List<Point> cornersThis = mMOP2fptsThis.toList ();
                                        List<byte> byteStatus = mMOBStatus.toList ();

                                        int x = 0;
                                        int y = byteStatus.Count - 1;

                                        for (x = 0; x < y; x++) {
                                                if (byteStatus [x] == 1) {
                                                        Point pt = cornersThis [x];
                                                        Point pt2 = cornersPrev [x];

                                                        Core.circle (rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                                                        Core.line (rgbaMat, pt, pt2, colorRed, iLineThickness);
                                                }
                                        }
                                }

                                Utils.matToTexture2D (rgbaMat, texture, colors);

                                gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                        }

                }

                void OnDisable ()
                {
                        webCamTexture.Stop ();
                }

                void OnGUI ()
                {
                        float screenScale = Screen.width / 240.0f;
                        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
                        GUI.matrix = scaledMatrix;

                        GUILayout.BeginVertical ();
                        if (GUILayout.Button ("back")) {
                                Application.LoadLevel ("OpenCVForUnitySample");
                        }
                        if (GUILayout.Button ("change camera")) {
                                isFrontFacing = !isFrontFacing;
                                StartCoroutine (init ());
                        }

                        GUILayout.EndVertical ();
                }
            }
        }
Example #39
        static int VoteForSizeAndOrientation(KeyPoint[] modelKeyPoints, KeyPoint[] observedKeyPoints, DMatch[][] matches, Mat mask, float scaleIncrement, int rotationBins)
        {
            int idx = 0;
            int nonZeroCount = 0;
            byte[] maskMat = new byte[mask.Rows];
            GCHandle maskHandle = GCHandle.Alloc(maskMat, GCHandleType.Pinned);
            using (Mat m = new Mat(mask.Rows, 1, MatType.CV_8U, maskHandle.AddrOfPinnedObject()))
            {
                mask.CopyTo(m);
                List<float> logScale = new List<float>();
                List<float> rotations = new List<float>();
                double s, maxS, minS, r;
                maxS = -1.0e-10f; minS = 1.0e10f;

                //if you get an exception here, it's because you're passing in the model and observed keypoints backwards.  Just switch the order.
                for (int i = 0; i < maskMat.Length; i++)
                {
                    if (maskMat[i] > 0)
                    {
                        KeyPoint observedKeyPoint = observedKeyPoints[i];
                        KeyPoint modelKeyPoint = modelKeyPoints[matches[i][0].TrainIdx];
                        s = Math.Log10(observedKeyPoint.Size / modelKeyPoint.Size);
                        logScale.Add((float)s);
                        maxS = s > maxS ? s : maxS;
                        minS = s < minS ? s : minS;

                        r = observedKeyPoint.Angle - modelKeyPoint.Angle;
                        r = r < 0.0f ? r + 360.0f : r;
                        rotations.Add((float)r);
                    }
                }

                int scaleBinSize = (int)Math.Ceiling((maxS - minS) / Math.Log10(scaleIncrement));
                if (scaleBinSize < 2)
                    scaleBinSize = 2;
                float[] scaleRanges = { (float)minS, (float)(minS + scaleBinSize * Math.Log10(scaleIncrement)) };

                using (MatOfFloat scalesMat = new MatOfFloat(rows: logScale.Count, cols: 1, data: logScale.ToArray()))
                using (MatOfFloat rotationsMat = new MatOfFloat(rows: rotations.Count, cols: 1, data: rotations.ToArray()))
                using (MatOfFloat flagsMat = new MatOfFloat(logScale.Count, 1))
                using (Mat hist = new Mat())
                {
                    flagsMat.SetTo(new Scalar(0.0f));
                    float[] flagsMatFloat1 = flagsMat.ToArray();

                    int[] histSize = { scaleBinSize, rotationBins };
                    float[] rotationRanges = { 0.0f, 360.0f };
                    int[] channels = { 0, 1 };
                    Rangef[] ranges = { new Rangef(scaleRanges[0], scaleRanges[1]), new Rangef(rotations.Min(), rotations.Max()) };
                    double minVal, maxVal;

                    Mat[] arrs = { scalesMat, rotationsMat };
                    Cv2.CalcHist(arrs, channels, null, hist, 2, histSize, ranges);
                    Cv2.MinMaxLoc(hist, out minVal, out maxVal);

                    Cv2.Threshold(hist, hist, maxVal * 0.5, 0, ThresholdTypes.Tozero);
                    Cv2.CalcBackProject(arrs, channels, hist, flagsMat, ranges);

                    MatIndexer<float> flagsMatIndexer = flagsMat.GetIndexer();

                    for (int i = 0; i < maskMat.Length; i++)
                    {
                        if (maskMat[i] > 0)
                        {
                            if (flagsMatIndexer[idx++] != 0.0f)
                            {
                                nonZeroCount++;
                            }
                            else
                                maskMat[i] = 0;
                        }
                    }
                    m.CopyTo(mask);
                }
            }
            maskHandle.Free();

            return nonZeroCount;
        }
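A hedged sketch of how this helper might be called. Given the indexing above (mask rows follow the query side, and matches[i][0].TrainIdx indexes the model keypoints), the knn query must be the observed descriptors and the train side the model descriptors; every variable besides the method itself is an assumption:

            var matcher = new BFMatcher(NormTypes.L2);
            DMatch[][] matches = matcher.KnnMatch(observedDescriptors, modelDescriptors, k: 2);
            Mat mask = new Mat(matches.Length, 1, MatType.CV_8U, Scalar.All(255));
            int survivors = VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5f, 20);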
Example #40
#if LANG_JP
        /// <summary>
        /// keypoint を検出し,その SIFT ディスクリプタを計算します.
        /// </summary>
        /// <param name="img">Input 8-bit grayscale image</param>
        /// <param name="mask">Optional input mask that marks the regions where we should detect features.</param>
        /// <param name="keypoints">The input/output vector of keypoints</param>
        /// <param name="descriptors">The output matrix of descriptors. </param>
        /// <param name="useProvidedKeypoints">Boolean flag. If it is true, the keypoint detector is not run. 
        /// Instead, the provided vector of keypoints is used and the algorithm just computes their descriptors.</param>
#else
        /// <summary>
        /// detects keypoints and computes the SIFT descriptors for them. 
        /// </summary>
        /// <param name="img">Input 8-bit grayscale image</param>
        /// <param name="mask">Optional input mask that marks the regions where we should detect features.</param>
        /// <param name="keypoints">The input/output vector of keypoints</param>
        /// <param name="descriptors">The output matrix of descriptors. </param>
        /// <param name="useProvidedKeypoints">Boolean flag. If it is true, the keypoint detector is not run. 
        /// Instead, the provided vector of keypoints is used and the algorithm just computes their descriptors.</param>
#endif
        public void Run(InputArray img, InputArray mask, out KeyPoint[] keypoints, out float[] descriptors,
            bool useProvidedKeypoints = false)
        {
            // SIFT doesn't seem to accept std::vector<float> for the descriptors, so convert via MatOfFloat instead
            MatOfFloat descriptorsMat = new MatOfFloat();
            Run(img, mask, out keypoints, descriptorsMat, useProvidedKeypoints);

            descriptors = descriptorsMat.ToArray();
        }
Example #41
        /// <summary>
        /// Raises the web cam texture to mat helper inited event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInited()
        {
            Debug.Log ("OnWebCamTextureToMatHelperInited");

                        Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();

                        colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
                        texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);

                        matOpFlowThis = new Mat ();
                        matOpFlowPrev = new Mat ();
                        MOPcorners = new MatOfPoint ();
                        mMOP2fptsThis = new MatOfPoint2f ();
                        mMOP2fptsPrev = new MatOfPoint2f ();
                        mMOP2fptsSafe = new MatOfPoint2f ();
                        mMOBStatus = new MatOfByte ();
                        mMOFerr = new MatOfFloat ();

                        gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);

                        Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

                        float width = 0;
                        float height = 0;

                        width = gameObject.transform.localScale.x;
                        height = gameObject.transform.localScale.y;

                        float widthScale = (float)Screen.width / width;
                        float heightScale = (float)Screen.height / height;
                        if (widthScale < heightScale) {
                                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                        } else {
                                Camera.main.orthographicSize = height / 2;
                        }

                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                        //			webCamTextureToMatHelper.Play ();
        }
Example #42
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();

                                matOpFlowThis.Dispose ();
                                matOpFlowPrev.Dispose ();
                                MOPcorners.Dispose ();
                                mMOP2fptsThis.Dispose ();
                                mMOP2fptsPrev.Dispose ();
                                mMOP2fptsSafe.Dispose ();
                                mMOBStatus.Dispose ();
                                mMOFerr.Dispose ();
                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }

                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();
                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
                                        while (webCamTexture.width <= 16) {
                                                webCamTexture.GetPixels32 ();
                                                yield return new WaitForEndOfFrame ();
                                        }
                                        #endif
                                #endif

                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                                        matOpFlowThis = new Mat ();
                                        matOpFlowPrev = new Mat ();
                                        MOPcorners = new MatOfPoint ();
                                        mMOP2fptsThis = new MatOfPoint2f ();
                                        mMOP2fptsPrev = new MatOfPoint2f ();
                                        mMOP2fptsSafe = new MatOfPoint2f ();
                                        mMOBStatus = new MatOfByte ();
                                        mMOFerr = new MatOfFloat ();

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        updateLayout ();

                                        screenOrientation = Screen.orientation;
                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }
                }
Example #43
 /// <summary>
 /// computes the distance transform map
 /// </summary>
 /// <param name="distanceType">Type of distance used (L1, L2, C, ...)</param>
 /// <param name="maskSize">Size of the distance transform mask</param>
 public MatOfFloat DistanceTransform(DistanceType distanceType, DistanceMaskSize maskSize)
 {
     var dst = new MatOfFloat();
     Cv2.DistanceTransform(this, dst, distanceType, maskSize);
     return dst;
 }
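A minimal usage sketch: a filled white disc on a black background, where each nonzero pixel of the result holds its distance to the nearest zero pixel. Enum member names vary between OpenCvSharp versions, so treat them as assumptions:

     var bin = new Mat(100, 100, MatType.CV_8UC1, Scalar.All(0));
     Cv2.Circle(bin, new Point(50, 50), 20, Scalar.All(255), -1);
     MatOfFloat dist = bin.DistanceTransform(DistanceType.L2, DistanceMaskSize.Mask5);
     // dist.Get<float>(50, 50) is roughly 20 at the disc's centre.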