Example #1
        /**
         * HomographyPointsGeometricallyConsistent check restricted to 4 points.
         * @param H
         * @return
         */
        override public bool geometricallyConsistent(HomographyMat H)
        {
            NyARDoublePoint2d[] x  = this._x;
            NyARDoublePoint2d[] xp = this._xp;

            //Inlined MultiplyPointHomographyInhomogenous: project the four stored points through H.
            for (int i = 0; i < 4; i++)
            {
                double sx = x[i].x;
                double sy = x[i].y;
                double w  = H.m20 * sx + H.m21 * sy + H.m22;
                xp[i].x = (H.m00 * sx + H.m01 * sy + H.m02) / w;
                xp[i].y = (H.m10 * sx + H.m11 * sy + H.m12) / w;
            }

            if (!Homography3PointsGeometricallyConsistent(x[0], x[1], x[2], xp[0], xp[1], xp[2]))
            {
                return(false);
            }
            if (!Homography3PointsGeometricallyConsistent(x[1], x[2], x[3], xp[1], xp[2], xp[3]))
            {
                return(false);
            }
            if (!Homography3PointsGeometricallyConsistent(x[2], x[3], x[0], xp[2], xp[3], xp[0]))
            {
                return(false);
            }
            if (!Homography3PointsGeometricallyConsistent(x[3], x[0], x[1], xp[3], xp[0], xp[1]))
            {
                return(false);
            }
            return(true);
        }
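
The four-triple loop above delegates the real work to Homography3PointsGeometricallyConsistent. For reference, a test of that kind usually reduces to an orientation (winding) check: the signed area of each source triple and of its projection through H must have the same sign, otherwise the homography folds the quad over itself. A minimal self-contained sketch of such a check, under that assumption (the name and signature below are illustrative, not the library API):

        //Orientation-consistency sketch: true when H does not flip the triangle (p1,p2,p3).
        static bool TriangleOrientationPreserved(
            double x1, double y1, double x2, double y2, double x3, double y3,
            double x1p, double y1p, double x2p, double y2p, double x3p, double y3p)
        {
            //Twice the signed area of the source triangle.
            double a = (x2 - x1) * (y3 - y1) - (y2 - y1) * (x3 - x1);
            //Twice the signed area of the projected triangle.
            double b = (x2p - x1p) * (y3p - y1p) - (y2p - y1p) * (x3p - x1p);
            //Consistent when both triangles wind in the same direction.
            return a * b > 0;
        }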
Example #2
        private bool query(FreakFeaturePointStack query_keyframe, KeyframeMap i_keymap, FeaturePairStack i_result)
        {
            // mMatchedInliers.clear();
            HomographyMat        H    = this._H;
            InverseHomographyMat hinv = new InverseHomographyMat_O1();

            int num_of_query_frame = query_keyframe.getLength();

            //Set up the work areas.
            if (num_of_query_frame > this._tmp_pair_stack[0].getArraySize())
            {
                this._tmp_pair_stack[0] = new FeaturePairStack(num_of_query_frame + 10);
                this._tmp_pair_stack[1] = new FeaturePairStack(num_of_query_frame + 10);
            }
            int tmp_ch       = 0;
            int last_inliers = 0;

            foreach (KeyValuePair <int, Keyframe> i in i_keymap)
            {
                Keyframe second = i.Value;
                FreakMatchPointSetStack ref_points = second.getFeaturePointSet();
                //Pick the work area for this keyframe.
                FeaturePairStack match_result = this._tmp_pair_stack[tmp_ch];
                //Reset the work area.
                match_result.clear();

                //Match the feature descriptors against this keyframe.
                if (this._matcher.match(query_keyframe, second, match_result) < this.mMinNumInliers)
                {
                    continue;
                }

                // Vote for a transformation based on the correspondences
                if (!this.mHoughSimilarityVoting.extractMatches(match_result, second.width(), second.height()))
                {
                    continue;
                }

                // Estimate the transformation between the two images
                if (!this.mRobustHomography.PreemptiveRobustHomography(H, match_result, second.width(), second.height()))
                {
                    continue;
                }

                //Compute the inverse homography (HInv) here.
                if (!hinv.inverse(H))
                {
                    continue;
                }

                // Apply some heuristics to the homography
                if (!hinv.checkHomographyHeuristics(second.width(), second.height()))
                {
                    continue;
                }

                // Find the inliers
                this._find_inliner.extructMatches(H, match_result);
                if (match_result.getLength() < mMinNumInliers)
                {
                    continue;
                }

                //
                // Use the estimated homography to find more inliers
                match_result.clear();
                if (_matcher.match(query_keyframe, ref_points, hinv, 10, match_result) < mMinNumInliers)
                {
                    continue;
                }

                //
                // Vote for a similarity with new matches
                if (!this.mHoughSimilarityVoting.extractMatches(match_result, second.width(), second.height()))
                {
                    continue;
                }

                //
                // Re-estimate the homography
                if (!this.mRobustHomography.PreemptiveRobustHomography(H, match_result, second.width(), second.height()))
                {
                    continue;
                }
                // Apply some heuristics to the homography
                if (!hinv.inverse(H))
                {
                    continue;
                }
                if (!hinv.checkHomographyHeuristics(second.width(), second.height()))
                {
                    continue;
                }
                //
                // Check if this is the best match based on number of inliers
                this._find_inliner.extructMatches(H, match_result);

                //If the match count meets the minimum and beats the best so far, swap the temporary channel.
                if (match_result.getLength() >= mMinNumInliers && match_result.getLength() > last_inliers)
                {
                    //Switch the output channel so the best result is preserved.
                    tmp_ch       = (tmp_ch + 1) % 2;
                    last_inliers = match_result.getLength();
                }
            }
            //When last_inliers > 0, the output is in _tmp_pair_stack[(tmp_ch + 1) % 2].
            if (last_inliers <= 0)
            {
                return(false);
            }
            {
                FeaturePairStack        match_result = this._tmp_pair_stack[(tmp_ch + 1) % 2];
                FeaturePairStack.Item[] dest         = match_result.getArray();
                for (int i = 0; i < match_result.getLength(); i++)
                {
                    FeaturePairStack.Item t = i_result.prePush();
                    if (t == null)
                    {
                        System.Console.WriteLine("Push overflow!");
                        break;
                    }
                    t.query = dest[i].query;
                    t.ref_  = dest[i].ref_;
                }
            }
            return(true);
        }
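
The loop above keeps only the best keyframe result without copying matches, by ping-ponging between the two entries of _tmp_pair_stack: the current channel is overwritten on every iteration, and the channel index advances only when a new best is found, so the best result always survives in the other slot and is read back from [(tmp_ch + 1) % 2] at the end. A stripped-down sketch of the same pattern (illustrative names, not the library API):

        //Two-buffer "ping-pong" selection sketch.
        //requires: using System.Collections.Generic;
        static List<int> SelectBest(IEnumerable<List<int>> candidates, int minCount)
        {
            List<int>[] buffers = { new List<int>(), new List<int>() };
            int ch   = 0;
            int best = 0;
            foreach (List<int> candidate in candidates)
            {
                //Work in the current channel; this overwrites a previous non-best result.
                buffers[ch].Clear();
                buffers[ch].AddRange(candidate);
                if (buffers[ch].Count >= minCount && buffers[ch].Count > best)
                {
                    best = buffers[ch].Count;
                    //Protect the new best; keep writing into the other slot.
                    ch = (ch + 1) % 2;
                }
            }
            //The best result, if any, sits in the channel not currently being written.
            return best > 0 ? buffers[(ch + 1) % 2] : null;
        }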
        /**
         * SolveHomography4Points
         * Solve for the homography given 4 point correspondences.
         */
        override public bool solveHomography4Points(FeaturePairStack.Item p1, FeaturePairStack.Item p2, FeaturePairStack.Item p3, FeaturePairStack.Item p4, HomographyMat H)
        {
            NyARDoublePoint2d[] a1 = this._a1;
            NyARDoublePoint2d[] a2 = this._a2;
            a1[0] = p1.ref_; a1[1] = p2.ref_; a1[2] = p3.ref_; a1[3] = p4.ref_;
            a2[0] = p1.query; a2[1] = p2.query; a2[2] = p3.query; a2[3] = p4.query;
            double mus1_0 = 0;
            double mus1_1 = 0;
            double mus2_0 = 0;
            double mus2_1 = 0;
            double s1, s2;

            {
                /**
                 * Condition the four 2D points so that the mean is zero and the
                 * average distance from the origin is sqrt(2)
                 * (inlines condition4Points2d + AddHomographyPointContraint).
                 */
                //condition4Points2d(a1,a2,this._mat_A);
                for (int i = 0; i < 4; i++)
                {
                    mus1_0 += a1[i].x;
                    mus1_1 += a1[i].y;
                    mus2_0 += a2[i].x;
                    mus2_1 += a2[i].y;
                }
                mus1_0 /= 4;
                mus1_1 /= 4;
                mus2_0 /= 4;
                mus2_1 /= 4;
                double dw1 = 0;
                double dw2 = 0;
                for (int i = 0; i < 4; i++)
                {
                    double[] l;
                    double   X1 = a1[i].x - mus1_0;
                    double   Y1 = a1[i].y - mus1_1;
                    double   X2 = a2[i].x - mus2_0;
                    double   Y2 = a2[i].y - mus2_1;
                    dw2 += Math.Sqrt(X2 * X2 + Y2 * Y2);
                    dw1 += Math.Sqrt(X1 * X1 + Y1 * Y1);
                    l    = this._mat_A[i * 2];
                    l[0] = -X1;
                    l[1] = -Y1;
                    l[6] = X2 * X1;
                    l[7] = X2 * Y1;
                    l[8] = X2;
                    l    = this._mat_A[i * 2 + 1];
                    l[3] = -X1;
                    l[4] = -Y1;
                    l[6] = Y2 * X1;
                    l[7] = Y2 * Y1;
                    l[8] = Y2;
                }
                if (dw1 * dw2 == 0)
                {
                    return(false);
                }
                s1 = (4 / dw1) * SQRT2;
                s2 = (4 / dw2) * SQRT2;
                //			double ss=s1*s2;
                for (int i = 0; i < 4; i++)
                {
                    double[] l;
                    l    = this._mat_A[i * 2];
                    l[6] = (-l[0] * s1) * (l[8] * s2); //emulate the original calculation order.
                    l[7] = (-l[1] * s1) * (l[8] * s2); //emulate the original calculation order.
                    //				l[6]*=ss;
                    //				l[7]*=ss;
                    l[0] *= s1;
                    l[1] *= s1;
                    l[2]  = -1;
                    l[3]  = l[4] = l[5] = 0;
                    l[8] *= s2;
                    l     = this._mat_A[i * 2 + 1];
                    l[6]  = (-l[3] * s1) * (l[8] * s2); //emulate the original calculation order.
                    l[7]  = (-l[4] * s1) * (l[8] * s2); //emulate the original calculation order.
                    //				l[6]*=ss;
                    //				l[7]*=ss;
                    l[0]  = l[1] = l[2] = 0;
                    l[3] *= s1;
                    l[4] *= s1;
                    l[5]  = -1;
                    l[8] *= s2;
                }
            }

            if (!this.solveHomography4PointsInhomogenous(H))
            {
                return(false);
            }


            //
            // Denormalize the computed homography
            //
            {   //H.denormalizeHomography(ts, tps);
                double stx = s1 * mus1_0;
                double sty = s1 * mus1_1;

                double apc = (H.m20 * mus2_0) + (H.m00 / s2);
                double bpd = (H.m21 * mus2_0) + (H.m01 / s2);
                H.m00 = s1 * apc;
                H.m01 = s1 * bpd;
                H.m02 = H.m22 * mus2_0 + H.m02 / s2 - stx * apc - sty * bpd;

                double epg = (H.m20 * mus2_1) + (H.m10 / s2);
                double fph = (H.m21 * mus2_1) + (H.m11 / s2);
                H.m10 = s1 * epg;
                H.m11 = s1 * fph;
                H.m12 = H.m22 * mus2_1 + H.m12 / s2 - stx * epg - sty * fph;

                H.m20 = H.m20 * s1;
                H.m21 = H.m21 * s1;
                H.m22 = H.m22 - H.m20 * mus1_0 - H.m21 * mus1_1;
            }



            return(true);
        }
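
The "Denormalize the computed homography" block above is an inlined matrix composition. Assuming the usual conditioning convention, the reference points are mapped by T1 (subtract the mean, scale by s1), the query points by T2 (subtract the mean, scale by s2), the homography H' is solved in those conditioned coordinates, and the final result is H = inv(T2) * H' * T1. A sketch of that composition written out explicitly, for comparison with the inlined form (hypothetical helpers, not the library API):

        //Compose H = inv(T2) * Hc * T1 from the conditioning parameters.
        static double[,] Denormalize(double[,] Hc, double s1, double mu1x, double mu1y,
                                                   double s2, double mu2x, double mu2y)
        {
            //T1 conditions the reference points: x_hat = s1 * (x - mu1).
            double[,] T1    = { { s1, 0, -s1 * mu1x }, { 0, s1, -s1 * mu1y }, { 0, 0, 1 } };
            //inv(T2) undoes the query-side conditioning: x = x_hat / s2 + mu2.
            double[,] T2inv = { { 1 / s2, 0, mu2x }, { 0, 1 / s2, mu2y }, { 0, 0, 1 } };
            return Mul(T2inv, Mul(Hc, T1));
        }

        //Plain 3x3 matrix product.
        static double[,] Mul(double[,] a, double[,] b)
        {
            double[,] c = new double[3, 3];
            for (int r = 0; r < 3; r++)
                for (int k = 0; k < 3; k++)
                    for (int j = 0; j < 3; j++)
                        c[r, j] += a[r, k] * b[k, j];
            return c;
        }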
        /**
         * Robustly solve for the homography given a set of correspondences.
         * p=src=ref,q=dest=query
         */
        public bool PreemptiveRobustHomography(HomographyMat H, FeaturePairStack matches, double i_width, double i_height)
        {
            RansacPointTable ps = this._ps;

            //		int[] tmp_i=new int[2 * num_points];
            HomographyMat[] hyp                = this.mHyp;/* 9*max_num_hypotheses */
            CostPair[]      hyp_costs          = this.mHypCosts;
            double          scale              = this.mCauchyScale;
            int             max_num_hypotheses = this.mMaxNumHypotheses;
            int             max_trials         = this.mMaxTrials;


            double one_over_scale2 = 1 / (scale * scale);

            int num_hypotheses_remaining;
            int cur_chunk_size, this_chunk_end;
            int sample_size = 4;

            this._homography_check.setTestWindow(i_width, i_height);

            int num_points = matches.getLength();

            // We need at least SAMPLE_SIZE points to sample from
            if (num_points < sample_size)
            {
                return(false);
            }

            // seed = 1234;
            ps.setSequential(matches.getArray(), num_points);
            ps.setSeed(1234);
            // Shuffle the indices
            ps.shuffle(num_points);

            int num_hypotheses = 0;

            // Compute a set of hypotheses
            for (int trial = 0; trial < max_trials; trial++)
            {
                // Shuffle the first SAMPLE_SIZE indices
                ps.shuffle(sample_size);

                // Check if the 4 points are geometrically valid
                if (!ps.geometricallyConsistent4Point())
                {
                    continue;
                }

                // Compute the homography
                if (!this._homography_solver.solveHomography4Points(ps.indics[0], ps.indics[1], ps.indics[2], ps.indics[3], hyp[num_hypotheses]))
                {
                    continue;
                }
                // Check the test points
                if (!this._homography_check.geometricallyConsistent(hyp[num_hypotheses]))
                {
                    continue;
                }
                num_hypotheses++;
                if (num_hypotheses == max_num_hypotheses)
                {
                    break;
                }
            }

            // We fail if no hypotheses could be computed
            if (num_hypotheses == 0)
            {
                return(false);
            }

            // Initialize the hypotheses costs
            for (int i = 0; i < num_hypotheses; i++)
            {
                hyp_costs[i].first  = 0;
                hyp_costs[i].second = i;
            }

            num_hypotheses_remaining = num_hypotheses;

            //Determine the chunk size.
            int chunk_size = this.mChunkSize;

            if (chunk_size > num_points)
            {
                chunk_size = num_points;
            }
            cur_chunk_size = chunk_size;

            for (int i = 0; i < num_points && num_hypotheses_remaining > 2; i += cur_chunk_size)
            {
                // Size of the current chunk
                cur_chunk_size = (chunk_size < num_points - i) ? chunk_size : (num_points - i);

                // End of the current chunk
                this_chunk_end = i + cur_chunk_size;

                // Score each of the remaining hypotheses
                for (int j = 0; j < num_hypotheses_remaining; j++)
                {
                    // const T* H_cur = &hyp[hyp_costs[j].second*9];
                    double hf = hyp[hyp_costs[j].second].cauchyProjectiveReprojectionCostSum(ps.indics, i, this_chunk_end, one_over_scale2);
                    hyp_costs[j].first = hf;
                }
                // Cut out half of the hypotheses
                FastMedian(hyp_costs, num_hypotheses_remaining);
                num_hypotheses_remaining = num_hypotheses_remaining >> 1;
            }

            // Find the best hypothesis
            int    min_index = hyp_costs[0].second;
            double min_cost  = hyp_costs[0].first;

            for (int i = 1; i < num_hypotheses_remaining; i++)
            {
                if (hyp_costs[i].first < min_cost)
                {
                    min_cost  = hyp_costs[i].first;
                    min_index = hyp_costs[i].second;
                }
            }

            // Move the best hypothesis
            H.setValue(hyp[min_index]);
            H.normalizeHomography();

            return(true);
        }
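
In the scoring loop above, each surviving hypothesis is charged the sum returned by cauchyProjectiveReprojectionCostSum over the current chunk of correspondences; FastMedian then partitions the hypotheses by cost and the more expensive half is dropped, which is the preemptive part of the scheme. A minimal sketch of the kind of per-point robust cost presumably being summed, assuming a Cauchy kernel log(1 + r^2 / scale^2) on the reprojection residual (illustrative, not the library API):

        //Project the reference point (sx,sy) through H and charge a Cauchy cost against (qx,qy).
        //Outliers contribute a slowly growing penalty instead of a quadratic one.
        static double CauchyReprojectionCost(double[,] H, double sx, double sy,
                                             double qx, double qy, double oneOverScale2)
        {
            double w  = H[2, 0] * sx + H[2, 1] * sy + H[2, 2];
            double px = (H[0, 0] * sx + H[0, 1] * sy + H[0, 2]) / w;
            double py = (H[1, 0] * sx + H[1, 1] * sy + H[1, 2]) / w;
            double dx = px - qx;
            double dy = py - qy;
            return Math.Log(1.0 + (dx * dx + dy * dy) * oneOverScale2);
        }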
Example #5
        /**
         * HomographyPointsGeometricallyConsistent check restricted to 4 points (the image corners).
         * @param H
         * @return
         */
        virtual public bool geometricallyConsistent(HomographyMat H)
        {
            NyARDoublePoint2d[] x = NyARDoublePoint2d.createArray(4);
            x[0].x = 0;
            x[0].y = 0;
            x[1].x = this._w;
            x[1].y = 0;
            x[2].x = this._w;
            x[2].y = this._h;
            x[3].x = 0;
            x[3].y = this._h;
            NyARDoublePoint2d xp1       = new NyARDoublePoint2d();
            NyARDoublePoint2d xp2       = new NyARDoublePoint2d();
            NyARDoublePoint2d xp3       = new NyARDoublePoint2d();
            NyARDoublePoint2d first_xp1 = new NyARDoublePoint2d();
            NyARDoublePoint2d first_xp2 = new NyARDoublePoint2d();


            int x1_ptr = 0;
            //        int x2_ptr = 0+1;
            //        int x3_ptr = 0+2;

            NyARDoublePoint2d xp1_ptr = xp1;
            NyARDoublePoint2d xp2_ptr = xp2;
            NyARDoublePoint2d xp3_ptr = xp3;

            //
            // Check the first 3 points
            //
            MultiplyPointHomographyInhomogenous(xp1, H, x[x1_ptr]);
            MultiplyPointHomographyInhomogenous(xp2, H, x[x1_ptr + 1]);
            MultiplyPointHomographyInhomogenous(xp3, H, x[x1_ptr + 2]);

            first_xp1.setValue(xp1); //indexing.CopyVector2(first_xp1,0, xp1,0);
            first_xp2.setValue(xp2); //indexing.CopyVector2(first_xp2,0, xp2,0);


            if (!Homography3PointsGeometricallyConsistent(
                    x[x1_ptr], x[x1_ptr + 1], x[x1_ptr + 2],
                    xp1_ptr, xp2_ptr, xp3_ptr))
            {
                return(false);
            }

            //
            // Check the remaining points
            //

            for (int i = 3; i < x.Length; i++)
            {
                x1_ptr += 1;
                MultiplyPointHomographyInhomogenous(xp1_ptr, H, x[x1_ptr + 2]);

                NyARDoublePoint2d tmp_ptr = xp1_ptr;
                xp1_ptr = xp2_ptr;
                xp2_ptr = xp3_ptr;
                xp3_ptr = tmp_ptr;

                if (!Homography3PointsGeometricallyConsistent(
                        x[x1_ptr], x[x1_ptr + 1], x[x1_ptr + 2],
                        xp1_ptr, xp2_ptr, xp3_ptr))
                {
                    return(false);
                }
            }

            //
            // Check the last 3 points
            //

            if (!Homography3PointsGeometricallyConsistent(
                    x[x1_ptr + 1], x[x1_ptr + 2], x[0],
                    xp2_ptr, xp3_ptr, first_xp1))
            {
                return(false);
            }
            if (!Homography3PointsGeometricallyConsistent(
                    x[x1_ptr + 2], x[0], x[1],
                    xp3_ptr, first_xp1, first_xp2))
            {
                return(false);
            }

            return(true);
        }
        /**
         * SolveHomography4Points
         * Solve for the homography given 4 point correspondences.
         */
        virtual public bool solveHomography4Points(FeaturePairStack.Item p1, FeaturePairStack.Item p2, FeaturePairStack.Item p3, FeaturePairStack.Item p4, HomographyMat H)
        {
            // T s, sp;
            // T t[2], tp[2];

            NyARDoublePoint2d x1p = new NyARDoublePoint2d(), x2p = new NyARDoublePoint2d(), x3p = new NyARDoublePoint2d(), x4p = new NyARDoublePoint2d();
            NyARDoublePoint2d xp1p = new NyARDoublePoint2d(), xp2p = new NyARDoublePoint2d(), xp3p = new NyARDoublePoint2d(), xp4p = new NyARDoublePoint2d();

            double[] ts  = new double[3];
            double[] tps = new double[3];
            //
            // Condition the points
            //

            if (!condition4Points2d(x1p, x2p, x3p, x4p, ts, p1.ref_, p2.ref_, p3.ref_, p4.ref_))
            {
                return(false);
            }
            if (!condition4Points2d(xp1p, xp2p, xp3p, xp4p, tps, p1.query, p2.query, p3.query, p4.query))
            {
                return(false);
            }

            //
            // Solve for the homography
            //

            if (!this.solveHomography4PointsInhomogenous(H, x1p, x2p, x3p, x4p, xp1p, xp2p, xp3p, xp4p))
            {
                return(false);
            }

            //
            // Denormalize the computed homography
            //
            H.denormalizeHomography(ts, tps);
            return(true);
        }
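
For comparison with the inlined conditioning in the _O1 override earlier, this is roughly what a condition4Points2d-style helper does: shift the four points to zero mean and scale them so that their average distance from the origin is sqrt(2), reporting the transform so the homography can be denormalized afterwards. A sketch under that assumption; the {scale, meanX, meanY} layout of the output array is illustrative, not the library's contract:

        //Condition four points: zero mean, average distance sqrt(2) from the origin.
        static bool Condition4Points(NyARDoublePoint2d[] o_pts, NyARDoublePoint2d[] i_pts, double[] o_t)
        {
            double mx = 0, my = 0;
            for (int i = 0; i < 4; i++) { mx += i_pts[i].x; my += i_pts[i].y; }
            mx /= 4; my /= 4;
            double d = 0;
            for (int i = 0; i < 4; i++)
            {
                double dx = i_pts[i].x - mx;
                double dy = i_pts[i].y - my;
                d += Math.Sqrt(dx * dx + dy * dy);
            }
            if (d == 0)
            {
                //Degenerate configuration: all four points coincide.
                return false;
            }
            double s = (4 / d) * Math.Sqrt(2);
            for (int i = 0; i < 4; i++)
            {
                o_pts[i].x = (i_pts[i].x - mx) * s;
                o_pts[i].y = (i_pts[i].y - my) * s;
            }
            o_t[0] = s; o_t[1] = mx; o_t[2] = my;
            return true;
        }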