Example #1
        public static void toCameraViewRH(NyARDoubleMatrix44 mat, double i_scale, ref NyARDoubleMatrix44 o_mat)
        {
            o_mat.m00 = (float)-mat.m00;
            o_mat.m01 = (float)mat.m01;
            o_mat.m02 = (float)mat.m02;

            o_mat.m10 = (float)mat.m10;
            o_mat.m11 = (float)-mat.m11;
            o_mat.m12 = (float)-mat.m12;

            o_mat.m20 = (float)-mat.m20;
            o_mat.m21 = (float)mat.m21;
            o_mat.m22 = (float)mat.m22;

            o_mat.m30 = (float)0.0;
            o_mat.m31 = (float)0.0;
            o_mat.m32 = (float)0.0;
            double scale = 1 / i_scale;

            o_mat.m03 = (float)(mat.m03 * scale);
            o_mat.m13 = -(float)(mat.m13 * scale);
            o_mat.m23 = (float)(mat.m23 * scale);
            o_mat.m33 = (float)1.0;
            return;
        }
Example #2
        /**
         * Converts to a Direct3D-format camera view matrix.
         */
        public static void toD3dCameraView(NyARDoubleMatrix44 i_ny_result, float i_scale, ref Matrix o_result)
        {
            Matrix m;

            m.M11 = (float)i_ny_result.m00;
            m.M12 = -(float)i_ny_result.m10;
            m.M13 = -(float)i_ny_result.m20;
            m.M14 = 0;
            m.M21 = (float)i_ny_result.m01;
            m.M22 = -(float)i_ny_result.m11;
            m.M23 = -(float)i_ny_result.m21;
            m.M24 = 0;
            m.M31 = (float)i_ny_result.m02;
            m.M32 = -(float)i_ny_result.m12;
            m.M33 = -(float)i_ny_result.m22;
            m.M34 = 0;
            float scale = (1 / i_scale);

            m.M41 = (float)i_ny_result.m03 * scale;
            m.M42 = -(float)i_ny_result.m13 * scale;
            m.M43 = -(float)i_ny_result.m23 * scale;
            m.M44 = 1;

            o_result = m;
            return;
        }
        private void CameraZm_NewVideoSample(object sender, WPFMediaKit.DirectShow.MediaPlayers.VideoSampleArgs e)
        {
            Dispatcher.Invoke(new Action(delegate()
            {
                System.Drawing.Bitmap oNewFrame = e.VideoFrame;
                //byte[] oFrameMomeryStream = this.ConvertBitmapToBytes(oNewFrame);
                //IntPtr ptr = oNewFrame.GetHbitmap();
                this.MyArRaster.wrapBuffer(oNewFrame);

                if (this.MySingleDetectMarker.detectMarkerLite(this.MyArRaster, 127))
                {
                    this.CvMainZm.Children.Clear();
                    NyARDoubleMatrix44 oResultMat = new NyARDoubleMatrix44();
                    this.MySingleDetectMarker.getTransmationMatrix(oResultMat);

                    NyARSquare square = this.MySingleDetectMarker.refSquare();

                    Polygon oPolygon = new Polygon()
                    {
                        SnapsToDevicePixels = true,
                        Fill    = new SolidColorBrush(Colors.Violet),
                        Opacity = 0.8,
                        Stroke  = new SolidColorBrush(Colors.Red)
                    };
                    oPolygon.Points = new PointCollection(new Point[] {
                        new Point(square.sqvertex[0].x, 600 - square.sqvertex[0].y),
                        new Point(square.sqvertex[1].x, 600 - square.sqvertex[1].y),
                        new Point(square.sqvertex[2].x, 600 - square.sqvertex[2].y),
                        new Point(square.sqvertex[3].x, 600 - square.sqvertex[3].y)
                    });
                    this.CvMainZm.Children.Add(oPolygon);
                }
            }));
        }
Example #4
        /* Asynchronous event handler.
         * Handles events from the CaptureDevice and updates the buffer and the texture.
         */
        public void OnBuffer(CaptureDevice i_sender, double i_sample_time, IntPtr i_buffer, int i_buffer_len)
        {
            int w = i_sender.video_width;
            int h = i_sender.video_height;
            int s = w * (i_sender.video_bit_count / 8);
            NyARDoubleMatrix44 nyar_transmat = this.__OnBuffer_nyar_transmat;

            //Load the RGB frame into the texture
            lock (this)
            {
                //Copy the camera image into the AR buffer
                this._raster.setBuffer(i_buffer, i_buffer_len, i_sender.video_vertical_flip);

                //Was a marker found?
                bool is_marker_enable = this._ar.detectMarkerLite(this._raster, 110);
                if (is_marker_enable)
                {
                    //If so, compute the Matrix
                    this._ar.getTransmationMatrix(nyar_transmat);
                    NyARD3dUtil.toD3dCameraView(nyar_transmat, 1f, ref this._trans_mat);
                }
                this._is_marker_enable = is_marker_enable;
                //Update the texture contents
                this._surface.setRaster(this._raster);
            }
            return;
        }
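A minimal render-side sketch of how the view matrix produced by toD3dCameraView and stored in _trans_mat above is typically consumed. It assumes a Managed DirectX Device field named _device alongside the sample's _is_marker_enable/_trans_mat fields, so treat it as an illustration rather than library code.

        //Hedged sketch: assumes a Microsoft.DirectX.Direct3D.Device field "_device" plus the
        //_is_marker_enable/_trans_mat fields updated in OnBuffer above.
        private void DrawMarkerObject()
        {
            lock (this)
            {
                if (!this._is_marker_enable)
                {
                    return; //no marker in this frame
                }
                //apply the camera pose computed by NyARD3dUtil.toD3dCameraView
                this._device.Transform.View = this._trans_mat;
                //draw the geometry attached to the marker here
            }
        }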
Example #5
        public Form1()
        {
            InitializeComponent();
            //AR setup
            //Load the AR camera parameter file
            NyARParam ap = NyARParam.loadFromARParamFile(File.OpenRead(AR_CAMERA_FILE), 320, 240);

            //Read the AR pattern code
            NyARCode code = NyARCode.loadFromARPattFile(File.OpenRead(AR_CODE_FILE), 16, 16);

            NyARDoubleMatrix44 result_mat = new NyARDoubleMatrix44();
            //Set the calculation mode
            //Create the capture

            /**************************************************
            *  This code is set up to use capture device 0
            *  (the first capture device found).
            *  On a system with multiple capture devices it may not work as expected.
            *  To use the n-th device, change the 0 in CaptureDevice cap=cl[0];
            *  See SimpleLiteDirect3D for how to let the user select a device manually.
            **************************************************/
            CaptureDeviceList cl  = new CaptureDeviceList();
            CaptureDevice     cap = cl[0];

            cap.SetCaptureListener(this);
            cap.PrepareCapture(320, 240, 30);
            this.m_cap = cap;
            //Create the raster.
            this.m_raster = new DsRgbRaster(cap.video_width, cap.video_height);
            //Create a class that tracks a single pattern
            this.m_ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0);
            this.m_ar.setContinueMode(false);
        }
	    /**
	     * Make transform matrix by the ICP algorithm.
	     * The i_prev_result parameter has no effect; it should be set to 0.
	     */
	    public bool transMatContinue(NyARSquare i_square, NyARRectOffset i_offset, NyARDoubleMatrix44 i_prev_result, double i_prev_err,
			    NyARDoubleMatrix44 o_result, NyARTransMatResultParam o_param)
	    {
		    if(this._icpp.icpPoint(i_square.sqvertex,i_offset.vertex,4,o_result,o_result,o_param)){
			    return true;
		    }
		    return false;
	    }
Example #7
        /* Returns the camera projection Matrix (RH).
         * This Matrix can be assigned to Microsoft.DirectX.Direct3d.Device.Transform.Projection.
         */
        public static void toCameraFrustumRH(NyARPerspectiveProjectionMatrix i_promat, NyARIntSize i_size, double i_scale, double i_near, double i_far, ref Matrix o_d3d_projection)
        {
            NyARDoubleMatrix44 m = new NyARDoubleMatrix44();

            i_promat.makeCameraFrustumRH(i_size.w, i_size.h, i_near * i_scale, i_far * i_scale, m);
            NyARD3dUtil.mat44ToD3dMatrixT(m, ref o_d3d_projection);
            return;
        }
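The comment above says the resulting Matrix can be assigned to Device.Transform.Projection; here is a minimal setup sketch under that reading. The ap.getScreenSize() call and the near/far values are assumptions made for illustration.

        //Hedged setup sketch: "device" is a Microsoft.DirectX.Direct3D.Device and "ap" a loaded
        //NyARParam; ap.getScreenSize() and the near/far values 10/10000 are assumptions.
        private static void SetupArProjection(Device device, NyARParam ap)
        {
            Matrix proj = new Matrix();
            NyARD3dUtil.toCameraFrustumRH(ap.getPerspectiveProjectionMatrix(), ap.getScreenSize(), 1.0, 10, 10000, ref proj);
            device.Transform.Projection = proj;
        }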
        /**
         * This function computes the perspective parameters and returns them.
         */
        public override sealed bool getParam(int i_dest_w, int i_dest_h, double x1, double y1, double x2, double y2, double x3, double y3, double x4, double y4, double[] o_param)
        {
            double ltx = this._local_x;
            double lty = this._local_y;
            double rbx = ltx + i_dest_w;
            double rby = lty + i_dest_h;

            NyARDoubleMatrix44 mat_x = new NyARDoubleMatrix44();
            mat_x.m00 = ltx; mat_x.m01 = lty; mat_x.m02 = -ltx * x1; mat_x.m03 = -lty * x1;
            mat_x.m10 = rbx; mat_x.m11 = lty; mat_x.m12 = -rbx * x2; mat_x.m13 = -lty * x2;
            mat_x.m20 = rbx; mat_x.m21 = rby; mat_x.m22 = -rbx * x3; mat_x.m23 = -rby * x3;
            mat_x.m30 = ltx; mat_x.m31 = rby; mat_x.m32 = -ltx * x4; mat_x.m33 = -rby * x4;
            mat_x.inverse(mat_x);
            NyARDoubleMatrix44 mat_y = new NyARDoubleMatrix44();
            mat_y.m00 = ltx; mat_y.m01 = lty; mat_y.m02 = -ltx * y1; mat_y.m03 = -lty * y1;
            mat_y.m10 = rbx; mat_y.m11 = lty; mat_y.m12 = -rbx * y2; mat_y.m13 = -lty * y2;
            mat_y.m20 = rbx; mat_y.m21 = rby; mat_y.m22 = -rbx * y3; mat_y.m23 = -rby * y3;
            mat_y.m30 = ltx; mat_y.m31 = rby; mat_y.m32 = -ltx * y4; mat_y.m33 = -rby * y4;
            mat_y.inverse(mat_y);
            double a = mat_x.m20 * x1 + mat_x.m21 * x2 + mat_x.m22 * x3 + mat_x.m23 * x4;
            double b = mat_x.m20 + mat_x.m21 + mat_x.m22 + mat_x.m23;
            double d = mat_x.m30 * x1 + mat_x.m31 * x2 + mat_x.m32 * x3 + mat_x.m33 * x4;
            double f = mat_x.m30 + mat_x.m31 + mat_x.m32 + mat_x.m33;

            double g = mat_y.m20 * y1 + mat_y.m21 * y2 + mat_y.m22 * y3 + mat_y.m23 * y4;
            double h = mat_y.m20 + mat_y.m21 + mat_y.m22 + mat_y.m23;
            double i = mat_y.m30 * y1 + mat_y.m31 * y2 + mat_y.m32 * y3 + mat_y.m33 * y4;
            double j = mat_y.m30 + mat_y.m31 + mat_y.m32 + mat_y.m33;

            NyARDoubleMatrix22 tm = new NyARDoubleMatrix22();
            tm.m00 = b;
            tm.m01 = -h;
            tm.m10 = f;
            tm.m11 = -j;
            tm.inverse(tm);

            double A, B, C, D, E, F, G, H;

            C = tm.m00 * (a - g) + tm.m01 * (d - i);	//C
            F = tm.m10 * (a - g) + tm.m11 * (d - i);	//F
            G = a - C * b;
            H = d - C * f;
            A = (mat_x.m00 * x1 + mat_x.m01 * x2 + mat_x.m02 * x3 + mat_x.m03 * x4) - C * (mat_x.m00 + mat_x.m01 + mat_x.m02 + mat_x.m03);
            B = (mat_x.m10 * x1 + mat_x.m11 * x2 + mat_x.m12 * x3 + mat_x.m13 * x4) - C * (mat_x.m10 + mat_x.m11 + mat_x.m12 + mat_x.m13);
            D = (mat_y.m00 * y1 + mat_y.m01 * y2 + mat_y.m02 * y3 + mat_y.m03 * y4) - F * (mat_y.m00 + mat_y.m01 + mat_y.m02 + mat_y.m03);
            E = (mat_y.m10 * y1 + mat_y.m11 * y2 + mat_y.m12 * y3 + mat_y.m13 * y4) - F * (mat_y.m10 + mat_y.m11 + mat_y.m12 + mat_y.m13);

            o_param[0] = A;
            o_param[1] = B;
            o_param[2] = C;
            o_param[3] = D;
            o_param[4] = E;
            o_param[5] = F;
            o_param[6] = G;
            o_param[7] = H;

            return true;
        }
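A hedged reading of the eight output values: they appear to be the coefficients A..H of a plane-to-image perspective map in the usual ARToolKit form. This is an assumption inferred from how such parameter sets are normally consumed, not something stated in the snippet.

        //Hedged reading (assumption): o_param seems to hold A..H of
        //  u = (A*x + B*y + C) / (G*x + H*y + 1)
        //  v = (D*x + E*y + F) / (G*x + H*y + 1)
        //so a destination point could be pushed through the parameters like this:
        private static void ApplyPerspectiveParam(double[] p, double x, double y, out double u, out double v)
        {
            double w = p[6] * x + p[7] * y + 1.0;
            u = (p[0] * x + p[1] * y + p[2]) / w;
            v = (p[3] * x + p[4] * y + p[5]) / w;
        }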
Example #9
 public NyARIcp(NyARParam i_param)
 {
     this._ref_matXc2U              = i_param.getPerspectiveProjectionMatrix();
     this._maxLoop                  = ICP_MAX_LOOP;
     this.breakLoopErrorThresh      = ICP_BREAK_LOOP_ERROR_THRESH;
     this.breakLoopErrorRatioThresh = ICP_BREAK_LOOP_ERROR_RATIO_THRESH;
     this.breakLoopErrorThresh2     = ICP_BREAK_LOOP_ERROR_THRESH2;
     this.inlierProb                = ICP_INLIER_PROBABILITY;
 }
Example #10
 /**
  * Make transform matrix by the ICP algorithm.
  * The i_prev_result parameter has no effect; it should be set to 0.
  */
 public bool transMatContinue(NyARSquare i_square, NyARRectOffset i_offset, NyARDoubleMatrix44 i_prev_result, double i_prev_err,
                              NyARDoubleMatrix44 o_result, NyARTransMatResultParam o_param)
 {
     if (this._icpp.icpPoint(i_square.sqvertex, i_offset.vertex, 4, o_result, o_result, o_param))
     {
         return(true);
     }
     return(false);
 }
Example #11
 public NyARIcp(NyARDoubleMatrix44 i_projection_matrix)
 {
     this._ref_matXc2U              = i_projection_matrix;
     this._maxLoop                  = ICP_MAX_LOOP;
     this.breakLoopErrorThresh      = ICP_BREAK_LOOP_ERROR_THRESH;
     this.breakLoopErrorRatioThresh = ICP_BREAK_LOOP_ERROR_RATIO_THRESH;
     this.breakLoopErrorThresh2     = ICP_BREAK_LOOP_ERROR_THRESH2;
     this.inlierProb                = ICP_INLIER_PROBABILITY;
 }
Example #12
 public NyARIcp(NyARParam i_param)
 {
     this._ref_matXc2U = i_param.getPerspectiveProjectionMatrix();
     this._maxLoop = ICP_MAX_LOOP;
     this.breakLoopErrorThresh = ICP_BREAK_LOOP_ERROR_THRESH;
     this.breakLoopErrorRatioThresh = ICP_BREAK_LOOP_ERROR_RATIO_THRESH;
     this.breakLoopErrorThresh2 = ICP_BREAK_LOOP_ERROR_THRESH2;
     this.inlierProb = ICP_INLIER_PROBABILITY;
 }
        /**
         * Make transform matrix by the ICP algorithm.
         */
	    public bool transMat(NyARSquare i_square, NyARRectOffset i_offset,NyARDoubleMatrix44 i_result,NyARTransMatResultParam o_param)
	    {
		    if(this._icpc.icpGetInitXw2Xc_from_PlanarData(i_square.sqvertex,i_offset.vertex,4,i_result)){
			    if(this._icpp.icpPoint(i_square.sqvertex,i_offset.vertex,4,i_result,i_result,o_param)){
				    return true;
			    }
		    }
		    return false;
	    }
Example #14
        public static void restoreOutputOffset(NyARDoubleMatrix44 conv, NyARDoublePoint3d i_offset)
        {
            double dx = i_offset.x;
            double dy = i_offset.y;
            double dz = i_offset.z;

            conv.m03 = (conv.m03 - conv.m00 * dx - conv.m01 * dy - conv.m02 * dz);
            conv.m13 = (conv.m13 - conv.m10 * dx - conv.m11 * dy - conv.m12 * dz);
            conv.m23 = (conv.m23 - conv.m20 * dx - conv.m21 * dy - conv.m22 * dz);
            return;
        }
Example #15
 /**
  * Make transform matrix by the ICP algorithm.
  */
 public bool transMat(NyARSquare i_square, NyARRectOffset i_offset, NyARDoubleMatrix44 i_result, NyARTransMatResultParam o_param)
 {
     if (this._icpc.icpGetInitXw2Xc_from_PlanarData(i_square.sqvertex, i_offset.vertex, 4, i_result))
     {
         if (this._icpp.icpPoint(i_square.sqvertex, i_offset.vertex, 4, i_result, i_result, o_param))
         {
             return(true);
         }
     }
     return(false);
 }
Example #16
        /// <summary>
        /// Decomposes a matrix into a rotation and a translation vector.
        /// </summary>
        /// <param name='mat'>
        /// Source matrix.
        /// </param>
        /// <param name='i_scale'>
        /// Scale factor.
        /// </param>
        /// <param name='o_pos'>
        /// Output position.
        /// </param>
        /// <param name='o_rot'>
        /// Output rotation.
        /// </param>
        public static void mat2UnityVecRot(NyARDoubleMatrix44 mat, double i_scale, ref Vector3 o_pos, ref Quaternion o_rot)
        {
            mat2Rot(
                mat.m00, mat.m01, mat.m02,
                mat.m10, mat.m11, mat.m12,
                mat.m20, mat.m21, mat.m22, ref o_rot);
            double scale = 1 / i_scale;

            o_pos.x = (float)(mat.m03 * scale);
            o_pos.y = (float)(mat.m13 * scale);
            o_pos.z = (float)(mat.m23 * scale);
            return;
        }
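A minimal Unity-side sketch of applying the decomposed pose to a GameObject. The utility class name (NyARUnityUtil) and the scale value of 1.0 are assumptions; only the mat2UnityVecRot signature comes from the snippet above.

        //Hedged Unity sketch: "marker" is the GameObject to pose and "trans_mat" the matrix from
        //the detector; the class name NyARUnityUtil and the scale of 1.0 are assumptions.
        void ApplyMarkerPose(GameObject marker, NyARDoubleMatrix44 trans_mat)
        {
            Vector3 pos = Vector3.zero;
            Quaternion rot = Quaternion.identity;
            NyARUnityUtil.mat2UnityVecRot(trans_mat, 1.0, ref pos, ref rot);
            marker.transform.localPosition = pos;
            marker.transform.localRotation = rot;
        }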
Example #17
        public void Test()
        {
            //Load the AR camera parameter file
            NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(camera_file));

            ap.changeScreenSize(320, 240);

            //Read the AR pattern code
            NyARCode code = NyARCode.createFromARPattFile(new StreamReader(code_file), 16, 16);

            //Read the test image (320x240 BGRA RAW data)
            StreamReader sr = new StreamReader(data_file);
            BinaryReader bs = new BinaryReader(sr.BaseStream);

            byte[] raw = bs.ReadBytes(320 * 240 * 4);

//            NyARBitmapRaster ra = new NyARBitmapRaster(320, 240);
//            Graphics g = Graphics.FromImage(ra.getBitmap());
//            g.DrawImage(new Bitmap("../../../../../data/320x240ABGR.png"), 0, 0);


            NyARRgbRaster ra = new NyARRgbRaster(320, 240, NyARBufferType.BYTE1D_B8G8R8X8_32, false);

            ra.wrapBuffer(raw);

            //Create a class that tracks a single pattern
            NyARSingleDetectMarker ar         = NyARSingleDetectMarker.createInstance(ap, code, 80.0, NyARSingleDetectMarker.PF_NYARTOOLKIT);
            NyARDoubleMatrix44     result_mat = new NyARDoubleMatrix44();

            ar.setContinueMode(false);
            ar.detectMarkerLite(ra, 100);
            ar.getTransmationMatrix(result_mat);

            //Detect the marker
            Stopwatch sw = new Stopwatch();

            sw.Start();
            for (int i = 0; i < 1000; i++)
            {
                //Get the transformation matrix
                ar.detectMarkerLite(ra, 100);
                ar.getTransmationMatrix(result_mat);
            }
            Console.WriteLine(result_mat.m00 + "," + result_mat.m01 + "," + result_mat.m02 + "," + result_mat.m03);
            Console.WriteLine(result_mat.m10 + "," + result_mat.m11 + "," + result_mat.m12 + "," + result_mat.m13);
            Console.WriteLine(result_mat.m20 + "," + result_mat.m21 + "," + result_mat.m22 + "," + result_mat.m23);
            Console.WriteLine(result_mat.m30 + "," + result_mat.m31 + "," + result_mat.m32 + "," + result_mat.m33);
            sw.Stop();
            Console.WriteLine(sw.ElapsedMilliseconds + "[ms]");
            return;
        }
Example #18
        public static void ToCameraViewRH(NyARDoubleMatrix44 mat, double i_scale, ref Vector3 o_pos, ref Quaternion o_rot)
        {
            Mat2Rot(
                -mat.m00, mat.m01, mat.m02,
                mat.m10, -mat.m11, -mat.m12,
                -mat.m20, mat.m21, mat.m22,
                ref o_rot);
            double scale = 1 / i_scale;

            o_pos.x = (float)(mat.m03 * scale);
            o_pos.y = -(float)(mat.m13 * scale);
            o_pos.z = (float)(mat.m23 * scale);
            return;
        }
Example #19
        public NyARIcpStereo(NyARParam i_param_l, NyARParam i_param_r, NyARDoubleMatrix44 i_matC2_l, NyARDoubleMatrix44 i_matC2_r)
        {
            this.maxLoop = ICP_MAX_LOOP;
            this.breakLoopErrorThresh      = ICP_BREAK_LOOP_ERROR_THRESH;
            this.breakLoopErrorRatioThresh = ICP_BREAK_LOOP_ERROR_RATIO_THRESH;
            this.breakLoopErrorThresh2     = ICP_BREAK_LOOP_ERROR_THRESH2;
            this.inlierProb = ICP_INLIER_PROBABILITY;

            this._ref_matXcl2Ul = i_param_l.getPerspectiveProjectionMatrix();
            this._ref_matXcr2Ur = i_param_r.getPerspectiveProjectionMatrix();
            this._matC2L        = i_matC2_l;
            this._matC2R        = i_matC2_r;
            return;
        }
        public NyARIcpStereo(NyARParam i_param_l, NyARParam i_param_r, NyARDoubleMatrix44 i_matC2_l, NyARDoubleMatrix44 i_matC2_r)
        {
            this.maxLoop = ICP_MAX_LOOP;
            this.breakLoopErrorThresh = ICP_BREAK_LOOP_ERROR_THRESH;
            this.breakLoopErrorRatioThresh = ICP_BREAK_LOOP_ERROR_RATIO_THRESH;
            this.breakLoopErrorThresh2 = ICP_BREAK_LOOP_ERROR_THRESH2;
            this.inlierProb = ICP_INLIER_PROBABILITY;

            this._ref_matXcl2Ul = i_param_l.getPerspectiveProjectionMatrix();
            this._ref_matXcr2Ur = i_param_r.getPerspectiveProjectionMatrix();
            this._matC2L = i_matC2_l;
            this._matC2R = i_matC2_r;
            return;
        }
Example #21
        public void OnBuffer(CaptureDevice i_sender, double i_sample_time, IntPtr i_buffer, int i_buffer_len)
        {
            int w = i_sender.video_width;
            int h = i_sender.video_height;
            int s = w * (i_sender.video_bit_count / 8);
            NyARDoubleMatrix44 nyar_transmat = this.__OnBuffer_nyar_transmat;


            lock (this)
            {
                //this._raster.setBuffer(i_buffer,i_buffer_len, i_sender.video_vertical_flip);

                #region My Code
                try
                {
                    byte[] bimg = service.getb();
                    //if(bimg != null)
                    {
                        Image img = byteToImage(bimg);
                        if (img != null)
                        {
                            //frm.textBox1.Text = img.ToString();
                            this._raster = new NyARBitmapRaster((Bitmap)img);
                        }
                    }
                    //else
                }
                catch (Exception x)
                {
                    //MessageBox.Show(x.ToString());
                }
                #endregion



                bool is_marker_enable = this._ar.detectMarkerLite(this._raster, 110);
                if (is_marker_enable)
                {
                    this._ar.getTransmationMatrix(nyar_transmat);
                    NyARD3dUtil.toD3dCameraView(nyar_transmat, 1f, ref this._trans_mat);
                }
                this._is_marker_enable = is_marker_enable;

                this._surface.setRaster(this._raster);
            }
            return;
        }
Example #22
        /**
         * This function computes the pose transformation matrix from a square in the ideal coordinate system.
         * It differs from {@link #transMat} in that the calculation uses the previous history.
         * @see INyARTransMat#transMatContinue
         */
        public bool transMatContinue(NyARSquare i_square, NyARRectOffset i_offset, NyARDoubleMatrix44 i_prev_result, double i_prev_err, NyARDoubleMatrix44 o_result, NyARTransMatResultParam o_param)
        {
            NyARDoublePoint3d trans = this.__transMat_trans;

            //Set the 2D coordinates into the translation solver
            NyARDoublePoint2d[] vertex_2d;
            if (this._ref_dist_factor != null)
            {
                //Distortion correction required
                vertex_2d = this.__transMat_vertex_2d;
                this._ref_dist_factor.ideal2ObservBatch(i_square.sqvertex, vertex_2d, 4);
            }
            else
            {
                //No distortion correction required
                vertex_2d = i_square.sqvertex;
            }
            this._transsolver.set2dVertex(vertex_2d, 4);

            NyARDoublePoint3d[] vertex_3d = this.__transMat_vertex_3d;
            //Compute the rotation matrix
            this._rotmatrix.initRotByPrevResult(i_prev_result);

            //Compute the translation from the rotated 3D coordinates
            this._rotmatrix.getPoint3dBatch(i_offset.vertex, vertex_3d, 4);
            this._transsolver.solveTransportVector(vertex_3d, trans);

            //Optimize the result (translation and rotation matrix)
            double err = this.optimize(this._rotmatrix, trans, this._transsolver, i_offset.vertex, vertex_2d);

            // Store the matrix
            o_result.setValue(this._rotmatrix, trans);

            // If the error is outside the tolerance, redo TransMat
            if (err > AR_GET_TRANS_CONT_MAT_MAX_FIT_ERROR)
            {
                return(false);
            }
            // Store the matrix
            o_result.setValue(this._rotmatrix, trans);
            //Store the error value
            if (o_param != null)
            {
                o_param.last_error = err;
            }
            return(true);
        }
Example #23
        static void Main(string[] args)
        {
            String img_file    = "../../../../../data/testcase/test.raw";
            String cparam_file = "../../../../../data/testcase/camera_para5.dat";
            String fset3file   = "../../../../../data/testcase/pinball.fset3";
            //Camera parameters
            NyARParam param = NyARParam.loadFromARParamFile(File.OpenRead(cparam_file), 640, 480, NyARParam.DISTFACTOR_LT_ARTK5);

            INyARGrayscaleRaster gs = NyARGrayscaleRaster.createInstance(640, 480);
            //Prepare the test image
            {
                INyARRgbRaster rgb = NyARRgbRaster.createInstance(640, 480, NyARBufferType.BYTE1D_B8G8R8X8_32);
                Stream         fs  = File.OpenRead(img_file);
                byte[]         b   = (byte[])rgb.getBuffer();
                fs.Read(b, 0, b.Length);
                INyARRgb2GsFilterRgbAve filter = (INyARRgb2GsFilterRgbAve)rgb.createInterface(typeof(INyARRgb2GsFilterRgbAve));
                filter.convert(gs);
            }
            NyARDoubleMatrix44   tmat = new NyARDoubleMatrix44();
            NyARNftFreakFsetFile f    = NyARNftFreakFsetFile.loadFromfset3File(File.OpenRead(fset3file));
//			KpmHandle kpm=new KpmHandle(new ARParamLT(param));
            Stopwatch             sw     = new Stopwatch();
            FreakKeypointMatching kpm    = new FreakKeypointMatching(param);
            KeyframeMap           keymap = new KeyframeMap(f, 0);

            for (int j = 0; j < 4; j++)
            {
                sw.Reset();
                sw.Start();
                for (int i = 0; i < 20; i++)
                {
                    kpm.updateInputImage(gs);
                    kpm.updateFeatureSet();
                    kpm.kpmMatching(keymap, tmat);
                }
                //Test with FreakKeypointMatching#kMaxNumFeatures set to 300.
                sw.Stop();
                System.Console.WriteLine("Total=" + (sw.ElapsedMilliseconds));
                NyARDoubleMatrix44 TEST_PATT = new NyARDoubleMatrix44(new double[] {
                    0.98436354107742652, 0.0066768917838370646, -0.17602226595996517, -191.17967199668533,
                    0.011597578022657571, -0.99956974712564306, 0.026940987645082352, 63.00280574839347,
                    -0.17576664981496215, -0.028561157958401542, -0.98401745160789567, 611.75871553558636,
                    0, 0, 0, 1
                });
                System.Console.WriteLine(TEST_PATT.Equals(tmat));
            }
        }
Example #24
        /**
         * Correction that was added in ARToolKitV5
         * @param initConv
         * @param i_pos3d
         * @param i_offset
         * @param i_num
         */
        public static void modifyInputOffset(NyARDoubleMatrix44 initConv, NyARDoublePoint3d[] i_pos3d, int i_num, NyARDoublePoint3d i_offset)
        {
            double dx = i_offset.x;
            double dy = i_offset.y;
            double dz = i_offset.z;

            for (int i = 0; i < i_num; i++)
            {
                i_pos3d[i].x = i_pos3d[i].x - dx;
                i_pos3d[i].y = i_pos3d[i].y - dy;
                i_pos3d[i].z = i_pos3d[i].z - dz;
            }
            initConv.m03 = (initConv.m00 * dx + initConv.m01 * dy + initConv.m02 * dz + initConv.m03);
            initConv.m13 = (initConv.m10 * dx + initConv.m11 * dy + initConv.m12 * dz + initConv.m13);
            initConv.m23 = (initConv.m20 * dx + initConv.m21 * dy + initConv.m22 * dz + initConv.m23);
            return;
        }
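modifyInputOffset shifts the 3D points and folds the marker offset into the initial matrix before an ICP solve, and restoreOutputOffset (Example #14) removes that same offset from the translation column of the result. Below is a hedged sketch of how the pair could wrap an icpPoint call; the icp object and the arrays are placeholders, and the helper is assumed to live in the same class as the two static methods.

        //Hedged pairing sketch: "icp", "pos2d", "pos3d" and "offset" are placeholders, not the
        //library's actual call site.
        private static bool solveWithOffset(NyARIcp icp, NyARDoublePoint2d[] pos2d, NyARDoublePoint3d[] pos3d,
            int num, NyARDoublePoint3d offset, NyARDoubleMatrix44 initConv, NyARDoubleMatrix44 conv,
            NyARTransMatResultParam param)
        {
            modifyInputOffset(initConv, pos3d, num, offset);        //shift points, fold offset into initConv
            bool ok = icp.icpPoint(pos2d, pos3d, num, initConv, conv, param);
            restoreOutputOffset(conv, offset);                      //remove the offset from the result
            return ok;
        }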
Example #25
        public INyARRgbRaster getPlaneImage(int i_id, NyARSensor i_sensor, double i_x1, double i_y1, double i_x2,
                                            double i_y2, double i_x3, double i_y3, double i_x4, double i_y4, INyARRgbRaster i_raster)
        {
            NyARDoublePoint3d[] pos  = this.__pos3d;
            NyARDoublePoint2d[] pos2 = this.__pos2d;
            NyARDoubleMatrix44  tmat = this.getTransformMatrix(i_id);

            tmat.transform3d(i_x1, i_y1, 0, pos[1]);
            tmat.transform3d(i_x2, i_y2, 0, pos[0]);
            tmat.transform3d(i_x3, i_y3, 0, pos[3]);
            tmat.transform3d(i_x4, i_y4, 0, pos[2]);
            for (int i = 3; i >= 0; i--)
            {
                this._view.getFrustum().project(pos[i], pos2[i]);
            }
            return(i_sensor.getPerspectiveImage(pos2[0].x, pos2[0].y, pos2[1].x, pos2[1].y, pos2[2].x, pos2[2].y,
                                                pos2[3].x, pos2[3].y, i_raster));
        }
Example #26
 /**
  * This function computes the transformation matrix of the detected marker and returns it in o_result.
  * It can only be used if the immediately preceding {@link #detectMarkerLite} succeeded.
  * @param o_result
  * Object that receives the transformation matrix.
  * @
  */
 public void getTransmat(NyARDoubleMatrix44 o_result)
 {
      // Compute the position and related values of the best-matching marker
     if (this._is_continue)
     {
          //Check whether the history can be used
         if (this._last_input_mat == o_result)
         {
             if (this._transmat.transMatContinue(this._square, this._offset, o_result, this._last_result_param.last_error, o_result, this._last_result_param))
             {
                 return;
             }
         }
     }
      //History unusable or continuation recognition failed
     this._transmat.transMat(this._square, this._offset, o_result, this._last_result_param);
     this._last_input_mat = o_result;
     return;
 }
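The comment notes that getTransmat is only valid right after a successful detectMarkerLite. Below is a minimal call-order sketch under the assumption that both methods belong to the single-marker detector used elsewhere on this page; the raster type and the threshold of 110 are illustrative.

 //Hedged call-order sketch: "this._detector" is assumed to be the single-marker detector
 //exposing detectMarkerLite/getTransmat; the threshold 110 is illustrative.
 bool tryGetMarkerPose(INyARRgbRaster raster, NyARDoubleMatrix44 result_mat)
 {
     if (!this._detector.detectMarkerLite(raster, 110))
     {
         return false;
     }
     this._detector.getTransmat(result_mat);    //valid only right after a successful detect
     return true;
 }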
Example #27
            /**
             * Application framework handler (marker appeared)
             */
            protected override void onEnterHandler(INyIdMarkerData i_code)
            {
                NyIdMarkerData_RawBit code = (NyIdMarkerData_RawBit)i_code;

                if (code.length > 4)
                {
                    //Do not convert to int when longer than 4 bytes.
                    this.current_id = -1;//undefined_id
                }
                else
                {
                    this.current_id = 0;
                    //Concatenate up to 4 bytes into a single int value
                    for (int i = 0; i < code.length; i++)
                    {
                        this.current_id = (this.current_id << 8) | code.packet[i];
                    }
                }
                this.transmat = null;
            }
Example #28
 /**
  * Computes the transformation matrix for the vertex set output by AR2Tracking.
  * @param initConv
  * @param i_pos2d
  * Ideal coordinate point set
  * @param i_pos3d
  * Pose information set. Must correspond to i_pos2d.
  * @param i_num
  * Number of points in the set
  * @param conv
  * Output matrix for the result
  * @param o_ret_param
  * Parameters for the return value
  * @return
  * @throws NyARException
  */
 public bool surfaceTrackingTransmat(NyARDoubleMatrix44 initConv, NyARDoublePoint2d[] i_pos2d, NyARDoublePoint3d[] i_pos3d, int i_num, NyARDoubleMatrix44 conv, NyARTransMatResultParam o_ret_param)
 {
     this._icp.setInlierProbability(this._last_inliner_probability);
     if (!this._icp.icpPoint(i_pos2d, i_pos3d, i_num, initConv, conv, o_ret_param))
     {
         if (i_num < 4)
         {
             return(false);
         }
     }
     if (o_ret_param.last_error > this._surface_threshold)
     {
         this._icp_r.setInlierProbability(0.8);
         this._icp_r.icpPoint(i_pos2d, i_pos3d, i_num, conv, conv, o_ret_param);
         if (o_ret_param.last_error > this._surface_threshold)
         {
             this._icp_r.setInlierProbability(0.6);
             this._icp_r.icpPoint(i_pos2d, i_pos3d, i_num, conv, conv, o_ret_param);
             if (o_ret_param.last_error > this._surface_threshold)
             {
                 this._icp_r.setInlierProbability(0.4);
                 this._icp_r.icpPoint(i_pos2d, i_pos3d, i_num, conv, conv, o_ret_param);
                 if (o_ret_param.last_error > this._surface_threshold)
                 {
                     this._icp_r.setInlierProbability(0.0);
                     this._icp_r.icpPoint(i_pos2d, i_pos3d, i_num, conv, conv, o_ret_param);
                     if (o_ret_param.last_error > this._surface_threshold)
                     {
                         this._last_inliner_probability = 0;
                         return(false);
                     }
                     this._last_inliner_probability = 0;
                 }
                 this._last_inliner_probability = 0.4;
             }
             this._last_inliner_probability = 0.6;
         }
         this._last_inliner_probability = 0.8;
     }
     return(true);
 }
Example #29
        /**
         * This function computes the pose transformation matrix from a square in the ideal coordinate system.
         * It corresponds to ARToolKit's arGetTransMat.
         * @see INyARTransMat#transMatContinue
         */
        public bool transMat(NyARSquare i_square, NyARRectOffset i_offset, NyARDoubleMatrix44 o_result, NyARTransMatResultParam o_param)
        {
            NyARDoublePoint3d trans = this.__transMat_trans;

            //Set the 2D coordinates into the translation solver
            NyARDoublePoint2d[] vertex_2d;
            if (this._ref_dist_factor != null)
            {
                //Distortion correction required
                vertex_2d = this.__transMat_vertex_2d;
                this._ref_dist_factor.ideal2ObservBatch(i_square.sqvertex, vertex_2d, 4);
            }
            else
            {
                //No distortion correction required
                vertex_2d = i_square.sqvertex;
            }
            this._transsolver.set2dVertex(vertex_2d, 4);
            //Compute the rotation matrix
            if (!this._rotmatrix.initRotBySquare(i_square.line, i_square.sqvertex))
            {
                return(false);
            }

            //Compute the translation from the rotated 3D coordinates
            NyARDoublePoint3d[] vertex_3d = this.__transMat_vertex_3d;
            this._rotmatrix.getPoint3dBatch(i_offset.vertex, vertex_3d, 4);
            this._transsolver.solveTransportVector(vertex_3d, trans);

            //Optimize the result (translation and rotation matrix)
            double err = this.optimize(this._rotmatrix, trans, this._transsolver, i_offset.vertex, vertex_2d);

            // Store the matrix
            o_result.setValue(this._rotmatrix, trans);
            if (o_param != null)
            {
                o_param.last_error = err;
            }
            return(true);
        }
        /**
         * This function computes the transformation matrix of the i_index-th detected marker.
         * It can only be used if the immediately preceding {@link #detectMarkerLite} succeeded.
         * @param i_index
         * Specifies the index number of the detection result.
         * The value ranges from 0 to the return value of the {@link #detectMarkerLite} function minus 1.
         * @param o_result
         * Object that receives the result value
         * @
         */
        public void getTransmationMatrix(int i_index, NyARDoubleMatrix44 o_result)
        {
            Debug.Assert(o_result != null);
            NyARDetectMarkerResult result = this._square_detect.result_stack.getItem(i_index);

            // Compute the position and related values of the best-matching marker
            if (this._is_continue)
            {
                //Check whether the history can be used
                if (result.ref_last_input_matrix == o_result)
                {
                    if (this._transmat.transMatContinue(result.square, this._offset[result.arcode_id], o_result, result.last_result_param.last_error, o_result, result.last_result_param))
                    {
                        return;
                    }
                }
            }
            //History unusable or continuation recognition failed
            this._transmat.transMat(result.square, this._offset[result.arcode_id], o_result, result.last_result_param);
            result.ref_last_input_matrix = o_result;
            return;
        }
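The index-based variant pairs with a detectMarkerLite call whose return value gives the number of detected markers, as the comment above states. A minimal hedged loop follows; the detector variable, the raster, and the threshold are placeholders.

            //Hedged call-order sketch: "detector" is the multi-marker detector this method belongs
            //to; the raster and the threshold of 110 are illustrative.
            int found = detector.detectMarkerLite(raster, 110);
            for (int i = 0; i < found; i++)
            {
                NyARDoubleMatrix44 mat = new NyARDoubleMatrix44();
                detector.getTransmationMatrix(i, mat);
                //use mat for marker i here
            }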
Example #31
        public Form1()
        {
            InitializeComponent();

            NyARParam ap = NyARParam.loadFromARParamFile(File.OpenRead(AR_CAMERA_FILE), 640, 480);


            NyARCode code = NyARCode.loadFromARPattFile(File.OpenRead(AR_CODE_FILE), 16, 16);

            NyARDoubleMatrix44 result_mat = new NyARDoubleMatrix44();
            CaptureDeviceList  cl         = new CaptureDeviceList();
            CaptureDevice      cap        = cl[0];

            cap.SetCaptureListener(this);
            cap.PrepareCapture(640, 480, 30);
            this.m_cap = cap;

            this.m_raster = new DsRgbRaster(cap.video_width, cap.video_height);

            this.m_ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0);
            this.m_ar.setContinueMode(false);
        }
Example #32
        /**
         * This function computes the camera frustum from an ARToolKit-style ProjectionMatrix.
         * @param i_promat
         * @param i_size
         * Specifies the screen size.
         * @param i_scale
         * See {@link #toCameraFrustumRH(NyARParam i_arparam,double i_scale,double i_near,double i_far,double[] o_gl_projection)}.
         * @param i_near
         * See {@link #toCameraFrustumRH(NyARParam i_arparam,double i_scale,double i_near,double i_far,double[] o_gl_projection)}.
         * @param i_far
         * See {@link #toCameraFrustumRH(NyARParam i_arparam,double i_scale,double i_near,double i_far,double[] o_gl_projection)}.
         * @param o_gl_projection
         * See {@link #toCameraFrustumRH(NyARParam i_arparam,double i_scale,double i_near,double i_far,double[] o_gl_projection)}.
         */
        public static void toCameraFrustumRH(NyARPerspectiveProjectionMatrix i_promat, NyARIntSize i_size, double i_scale, double i_near, double i_far, ref Matrix4x4 o_mat)
        {
            NyARDoubleMatrix44 m = new NyARDoubleMatrix44();

            i_promat.makeCameraFrustumRH(i_size.w, i_size.h, i_near * i_scale, i_far * i_scale, m);
            o_mat.m00 = (float)m.m00;
            o_mat.m01 = (float)m.m01;
            o_mat.m02 = (float)m.m02;
            o_mat.m03 = (float)m.m03;
            o_mat.m10 = (float)m.m10;
            o_mat.m11 = (float)m.m11;
            o_mat.m12 = (float)m.m12;
            o_mat.m13 = (float)m.m13;
            o_mat.m20 = (float)m.m20;
            o_mat.m21 = (float)m.m21;
            o_mat.m22 = (float)m.m22;
            o_mat.m23 = (float)m.m23;
            o_mat.m30 = (float)m.m30;
            o_mat.m31 = (float)m.m31;
            o_mat.m32 = (float)m.m32;
            o_mat.m33 = (float)m.m33;
            return;
        }
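A minimal Unity-side sketch feeding the converted frustum to a Camera. The utility class name, ap.getScreenSize(), and the near/far values are assumptions for illustration; Camera.projectionMatrix is the standard Unity property.

        //Hedged Unity sketch: "cam" is the rendering Camera and "ap" the loaded NyARParam;
        //the class name NyARUnityUtil, getScreenSize(), and the near/far values are assumptions.
        void SetupArCamera(Camera cam, NyARParam ap)
        {
            Matrix4x4 proj = Matrix4x4.identity;
            NyARUnityUtil.toCameraFrustumRH(ap.getPerspectiveProjectionMatrix(), ap.getScreenSize(), 1.0, 0.1, 100.0, ref proj);
            cam.projectionMatrix = proj;
        }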
Example #33
        public static void mat44ToD3dMatrixT(NyARDoubleMatrix44 i_src, ref Matrix o_dst)
        {
            o_dst.M11 = (float)i_src.m00;
            o_dst.M21 = (float)i_src.m01;
            o_dst.M31 = (float)i_src.m02;
            o_dst.M41 = (float)i_src.m03;

            o_dst.M12 = (float)i_src.m10;
            o_dst.M22 = (float)i_src.m11;
            o_dst.M32 = (float)i_src.m12;
            o_dst.M42 = (float)i_src.m13;

            o_dst.M13 = (float)i_src.m20;
            o_dst.M23 = (float)i_src.m21;
            o_dst.M33 = (float)i_src.m22;
            o_dst.M43 = (float)i_src.m23;

            o_dst.M14 = (float)i_src.m30;
            o_dst.M24 = (float)i_src.m31;
            o_dst.M34 = (float)i_src.m32;
            o_dst.M44 = (float)i_src.m33;
            return;
        }
        /**
         * This function computes the pose transformation matrix from a square in the ideal coordinate system.
         * It corresponds to ARToolKit's arGetTransMat.
         * @see INyARTransMat#transMatContinue
         */
        public bool transMat(NyARSquare i_square, NyARRectOffset i_offset, NyARDoubleMatrix44 o_result, NyARTransMatResultParam o_param)
        {
            NyARDoublePoint3d trans = this.__transMat_trans;

            //Set the 2D coordinates into the translation solver
            NyARDoublePoint2d[] vertex_2d;
            if (this._ref_dist_factor != null)
            {
                //Distortion correction required
                vertex_2d = this.__transMat_vertex_2d;
                this._ref_dist_factor.ideal2ObservBatch(i_square.sqvertex, vertex_2d, 4);
            }
            else
            {
                //No distortion correction required
                vertex_2d = i_square.sqvertex;
            }
            this._transsolver.set2dVertex(vertex_2d, 4);
            //Compute the rotation matrix
            if (!this._rotmatrix.initRotBySquare(i_square.line, i_square.sqvertex))
            {
                return false;
            }

            //Compute the translation from the rotated 3D coordinates
            NyARDoublePoint3d[] vertex_3d = this.__transMat_vertex_3d;
            this._rotmatrix.getPoint3dBatch(i_offset.vertex, vertex_3d, 4);
            this._transsolver.solveTransportVector(vertex_3d, trans);

            //Optimize the result (translation and rotation matrix)
            double err = this.optimize(this._rotmatrix, trans, this._transsolver, i_offset.vertex, vertex_2d);

            // Store the matrix
            o_result.setValue(this._rotmatrix, trans);
            if (o_param != null)
            {
                o_param.last_error = err;
            }
            return true;
        }
Example #35
 /**
  * @deprecated
  * {@link #getTransmat}
  */
 public void getTransmationMatrix(NyARDoubleMatrix44 o_result)
 {
     this.getTransmat(o_result);
     return;
 }
Example #36
        public override bool icpPoint(NyARDoublePoint2d[] screenCoord,
            NyARDoublePoint3d[] worldCoord, int num,
            NyARDoubleMatrix44 initMatXw2Xc, NyARDoubleMatrix44 o_matxw2xc, NyARTransMatResultParam o_result_param)
        {
            double err0 = 0, err1;
            System.Diagnostics.Debug.Assert(num >= 4);

            NyARIcpUtils.DeltaS dS = this.__dS;
            NyARIcpUtils.U u = this.__u;

            //Reset the work objects
            if (this.__jus.getArraySize() < num)
            {
                this.__jus = new NyARIcpUtils.JusStack(num);
                this.__du = NyARDoublePoint2d.createArray(num);
            }
            NyARIcpUtils.JusStack jus = this.__jus;
            NyARDoublePoint2d[] du = this.__du;

            o_matxw2xc.setValue(initMatXw2Xc);

            double breakLoopErrorThresh = this.getBreakLoopErrorThresh();
            double breakLoopErrorThresh2 = this.getBreakLoopErrorThresh2();
            double breakLoopErrorRatioThresh = this.getBreakLoopErrorRatioThresh();
            double maxLoop = this.getMaxLoop();

            NyARDoubleMatrix44 matXw2U = this.__matXw2U;
            for (int i = 0; ; i++)
            {
                matXw2U.mul(this._ref_matXc2U, o_matxw2xc);
                err1 = 0.0;
                for (int j = 0; j < num; j++)
                {
                    if (!u.setXbyMatX2U(matXw2U, worldCoord[j]))
                    {
                        return false;
                    }
                    double dx = screenCoord[j].x - u.x;
                    double dy = screenCoord[j].y - u.y;
                    err1 += dx * dx + dy * dy;
                    du[j].x = dx;
                    du[j].y = dy;
                }
                err1 /= num;
                if (err1 < breakLoopErrorThresh)
                {
                    break;
                }
                if ((i > 0) && (err1 < breakLoopErrorThresh2) && (err1 / err0 > breakLoopErrorRatioThresh))
                {
                    break;
                }
                if (i == maxLoop)
                {
                    break;
                }
                err0 = err1;
                jus.clear();
                for (int j = 0; j < num; j++)
                {
                    if (!jus.push(this._ref_matXc2U, o_matxw2xc, worldCoord[j], du[j], 1.0))
                    {
                        return false;
                    }
                }
                if (!dS.setJusArray(jus))
                {
                    return false;
                }
                dS.makeMat(o_matxw2xc);
            }
            if (o_result_param != null)
            {
                o_result_param.last_error = err1;
            }
            // *err = err1;
            return true;
        }
Example #37
 public void getMatXc2U(NyARDoubleMatrix44 o_ret)
 {
     o_ret.setValue(this._ref_matXc2U);
 }
Example #38
 public void setMatXc2U(NyARDoubleMatrix44 i_value)
 {
     this._ref_matXc2U.setValue(i_value);
 }
        /**
         * This function computes the pose transformation matrix from a square in the ideal coordinate system.
         * It differs from {@link #transMat} in that the calculation uses the previous history.
         * @see INyARTransMat#transMatContinue
         */
        public bool transMatContinue(NyARSquare i_square, NyARRectOffset i_offset, NyARDoubleMatrix44 i_prev_result, double i_prev_err, NyARDoubleMatrix44 o_result, NyARTransMatResultParam o_param)
        {
            NyARDoublePoint3d trans = this.__transMat_trans;

            //Set the 2D coordinates into the translation solver
            NyARDoublePoint2d[] vertex_2d;
            if (this._ref_dist_factor != null)
            {
                //Distortion correction required
                vertex_2d = this.__transMat_vertex_2d;
                this._ref_dist_factor.ideal2ObservBatch(i_square.sqvertex, vertex_2d, 4);
            }
            else
            {
                //No distortion correction required
                vertex_2d = i_square.sqvertex;
            }
            this._transsolver.set2dVertex(vertex_2d, 4);

            NyARDoublePoint3d[] vertex_3d = this.__transMat_vertex_3d;
            //Compute the rotation matrix
            this._rotmatrix.initRotByPrevResult(i_prev_result);

            //Compute the translation from the rotated 3D coordinates
            this._rotmatrix.getPoint3dBatch(i_offset.vertex, vertex_3d, 4);
            this._transsolver.solveTransportVector(vertex_3d, trans);

            //Optimize the result (translation and rotation matrix)
            double err = this.optimize(this._rotmatrix, trans, this._transsolver, i_offset.vertex, vertex_2d);

            // Store the matrix
            o_result.setValue(this._rotmatrix, trans);

            // If the error is outside the tolerance, redo TransMat
            if (err > AR_GET_TRANS_CONT_MAT_MAX_FIT_ERROR)
            {
                return false;
            }
            // Store the matrix
            o_result.setValue(this._rotmatrix, trans);
            //Store the error value
            if (o_param != null)
            {
                o_param.last_error = err;
            }
            return true;
        }
Example #40
	    /**
	     * Performs pose estimation using the ICP algorithm.
	     * 
	     * @param screenCoord
	     * @param worldCoord
	     * @param num
	     * @param initMatXw2Xc
	     * @param o_matxw2xc
	     * @param o_result_param
	     * Object that receives the result parameters. Specify null if it is not needed.
	     * @return
	     * @throws NyARException
	     */
	    public abstract bool icpPoint(NyARDoublePoint2d[] screenCoord,
			    NyARDoublePoint3d[] worldCoord, int num,
			    NyARDoubleMatrix44 initMatXw2Xc, NyARDoubleMatrix44 o_matxw2xc,NyARTransMatResultParam o_result_param);
Example #41
 /**
  * Gets an RGB image into o_raster from the region enclosed by the four vertices in the camera coordinate system.
  * @param i_vertex
  * @param i_matrix
  * Transformation matrix applied to i_vertex.
  * If the target's pose matrix is specified, the vertices are interpreted in the target coordinate system. Set null if not needed.
  * @param i_resolution
  * @param o_raster
  * @return
  * @throws NyARException
  */
 public bool GetRgbPatt3d(NyARRealitySource i_src, NyARDoublePoint3d[] i_vertex, NyARDoubleMatrix44 i_matrix, int i_resolution, INyARRgbRaster o_raster)
 {
     NyARDoublePoint2d[] vx = NyARDoublePoint2d.createArray(4);
     if (i_matrix != null)
     {
          //Apply the pose transform, then the projection
         NyARDoublePoint3d v3d = new NyARDoublePoint3d();
         for (int i = 3; i >= 0; i--)
         {
             i_matrix.transform3d(i_vertex[i], v3d);
             this._ref_prjmat.project(v3d, vx[i]);
         }
     }
     else
     {
          //Projection only
         for (int i = 3; i >= 0; i--)
         {
             this._ref_prjmat.project(i_vertex[i], vx[i]);
         }
     }
      //Get the pattern
     return(i_src.refPerspectiveRasterReader().copyPatt(vx, 0, 0, i_resolution, o_raster));
 }
 public NyARIcpStereoPoint(NyARParam i_param_l, NyARParam i_param_r,
         NyARDoubleMatrix44 i_matC2_l, NyARDoubleMatrix44 i_matC2_r)
     : base(i_param_l, i_param_r, i_matC2_l, i_matC2_r)
 {
     throw new NyARException("This function is not checked.");
 }
        public bool icpStereoPoint(NyARDoublePoint2d[] screenCoord_l,
                NyARDoublePoint3d[] worldCoord_l, int num_l,
                NyARDoublePoint2d[] screenCoord_r,
                NyARDoublePoint3d[] worldCoord_r, int num_r,
                NyARDoubleMatrix44 initMatXw2Xc, NyARDoubleMatrix44 matXw2Xc)
        {
            System.Diagnostics.Debug.Assert(num_l + num_r >= 3);
            double err0 = 0, err1;
            // 6*2*num?
            NyARIcpUtils.DeltaS dS = this.__dS;
            //Reset the work objects
            if (this.__jus.getArraySize() < num_l + num_r)
            {
                this.__jus = new NyARIcpUtils.JusStack(num_l + num_r);
                this.__du = NyARDoublePoint2d.createArray(num_l + num_r);
            }
            NyARIcpUtils.JusStack jus = this.__jus;
            NyARDoublePoint2d[] du = this.__du;
            NyARIcpUtils.U u = this.__u;
            NyARDoubleMatrix44 matXc2Ul = new NyARDoubleMatrix44();
            NyARDoubleMatrix44 matXc2Ur = new NyARDoubleMatrix44();
            NyARDoubleMatrix44 matXw2Ul = new NyARDoubleMatrix44();
            NyARDoubleMatrix44 matXw2Ur = new NyARDoubleMatrix44();



            matXw2Xc.setValue(initMatXw2Xc);

            matXc2Ul.mul(this._ref_matXcl2Ul, this._matC2L);
            matXc2Ur.mul(this._ref_matXcr2Ur, this._matC2R);

            for (int i = 0; ; i++)
            {
                matXw2Ul.mul(matXc2Ul, matXw2Xc);
                matXw2Ur.mul(matXc2Ur, matXw2Xc);

                err1 = 0.0;
                for (int j = 0; j < num_l; j++)
                {
                    if (!u.setXbyMatX2U(matXw2Ul, worldCoord_l[j]))
                    {
                        return false;
                    }
                    double dx = screenCoord_l[j].x - u.x;
                    double dy = screenCoord_l[j].y - u.y;
                    err1 += dx * dx + dy * dy;
                    du[j].x = dx;
                    du[j].y = dy;
                }
                for (int j = 0; j < num_r; j++)
                {
                    if (!u.setXbyMatX2U(matXw2Ur, worldCoord_r[j]))
                    {
                        return false;
                    }
                    double dx = screenCoord_r[j].x - u.x;
                    double dy = screenCoord_r[j].y - u.y;
                    err1 += dx * dx + dy * dy;
                    du[j + num_l].x = dx;
                    du[j + num_l].y = dy;
                }
                err1 /= (num_l + num_r);

                if (err1 < this.breakLoopErrorThresh)
                {
                    break;
                }
                if (i > 0 && err1 < ICP_BREAK_LOOP_ERROR_THRESH2
                        && err1 / err0 > this.breakLoopErrorRatioThresh)
                {
                    break;
                }
                if (i == this.maxLoop)
                {
                    break;
                }
                err0 = err1;

                for (int j = 0; j < num_l; j++)
                {
                    if (!jus.push(this._ref_matXc2U, matXw2Xc, worldCoord_l[j], du[j], 1.0))
                    {
                        return false;
                    }
                }
                for (int j = 0; j < num_r; j++)
                {
                    if (!jus.push(this._ref_matXc2U, matXw2Xc, worldCoord_r[j], du[j], 1.0))
                    {
                        return false;
                    }
                }
                if (!dS.setJusArray(jus))
                {
                    return false;
                }
                dS.makeMat(matXw2Xc);
            }

            return false;
        }