Example #1
        public Form1()
        {
            InitializeComponent();
            //AR setup
            //Load the AR camera parameter file
            NyARParam ap = NyARParam.loadFromARParamFile(File.OpenRead(AR_CAMERA_FILE), 320, 240);

            //Load the AR pattern code
            NyARCode code = NyARCode.loadFromARPattFile(File.OpenRead(AR_CODE_FILE), 16, 16);

            NyARDoubleMatrix44 result_mat = new NyARDoubleMatrix44();
            //Set the computation mode
            //Create the capture device

            /**************************************************
            *  This code is set up to use device 0
            *  (the first capture device that was found).
            *  On a system with multiple capture devices it may not work as expected.
            *  To use the n-th device, change the 0 in CaptureDevice cap=cl[0]; below.
            *  See SimpleLiteDirect3D for how to let the user pick a device manually.
            **************************************************/
            CaptureDeviceList cl  = new CaptureDeviceList();
            CaptureDevice     cap = cl[0];

            cap.SetCaptureListener(this);
            cap.PrepareCapture(320, 240, 30);
            this.m_cap = cap;
            //Create the raster.
            this.m_raster = new DsRgbRaster(cap.video_width, cap.video_height);
            //Create a class that tracks only a single pattern
            this.m_ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0);
            this.m_ar.setContinueMode(false);
        }
        private void InitARConfigs()
        {
            NyARParam ap   = NyARParam.loadFromARParamFile(File.OpenRead(AR_CAMERA_FILE), 800, 600);
            NyARCode  code = NyARCode.loadFromARPattFile(File.OpenRead(AR_CODE_FILE2), 16, 16);

            this.MyArRaster           = new DsRgbRaster(800, 600);
            this.MySingleDetectMarker = NyARSingleDetectMarker.createInstance(ap, code, 80.0);
            this.MySingleDetectMarker.setContinueMode(false);
        }
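A minimal sketch of the frame callback that would pair with this constructor, assuming the DirectShow capture listener's OnBuffer signature and the setBuffer/video_vertical_flip members used here (assumptions, not shown in the example above):

        public void OnBuffer(CaptureDevice i_sender, double i_sample_time, IntPtr i_buffer, int i_buffer_len)
        {
            lock (this)
            {
                //Wrap the captured frame in the raster (assumed overload).
                this.m_raster.setBuffer(i_buffer, i_buffer_len, i_sender.video_vertical_flip);
                //Detect the single marker with a binarization threshold of 100.
                if (this.m_ar.detectMarkerLite(this.m_raster, 100))
                {
                    //Read back the camera-to-marker transform.
                    NyARDoubleMatrix44 mat = new NyARDoubleMatrix44();
                    this.m_ar.getTransmationMatrix(mat);
                }
            }
        }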
Example #3
        public void Test()
        {
            //Load the AR camera parameter file
            NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(camera_file));

            ap.changeScreenSize(320, 240);

            //Load the AR pattern code
            NyARCode code = NyARCode.createFromARPattFile(new StreamReader(code_file), 16, 16);

            //Load the test image (320x240 BGRA raw data)
            StreamReader sr = new StreamReader(data_file);
            BinaryReader bs = new BinaryReader(sr.BaseStream);

            byte[] raw = bs.ReadBytes(320 * 240 * 4);

//            NyARBitmapRaster ra = new NyARBitmapRaster(320, 240);
//            Graphics g = Graphics.FromImage(ra.getBitmap());
//            g.DrawImage(new Bitmap("../../../../../data/320x240ABGR.png"), 0, 0);


            NyARRgbRaster ra = new NyARRgbRaster(320, 240, NyARBufferType.BYTE1D_B8G8R8X8_32, false);

            ra.wrapBuffer(raw);

            //Create a class that tracks only a single pattern
            NyARSingleDetectMarker ar         = NyARSingleDetectMarker.createInstance(ap, code, 80.0, NyARSingleDetectMarker.PF_NYARTOOLKIT);
            NyARDoubleMatrix44     result_mat = new NyARDoubleMatrix44();

            ar.setContinueMode(false);
            ar.detectMarkerLite(ra, 100);
            ar.getTransmationMatrix(result_mat);

            //Detect the marker
            Stopwatch sw = new Stopwatch();

            sw.Start();
            for (int i = 0; i < 1000; i++)
            {
                //Get the transformation matrix
                ar.detectMarkerLite(ra, 100);
                ar.getTransmationMatrix(result_mat);
            }
            //Stop the stopwatch before printing so console I/O is not counted in the measurement.
            sw.Stop();
            Console.WriteLine(result_mat.m00 + "," + result_mat.m01 + "," + result_mat.m02 + "," + result_mat.m03);
            Console.WriteLine(result_mat.m10 + "," + result_mat.m11 + "," + result_mat.m12 + "," + result_mat.m13);
            Console.WriteLine(result_mat.m20 + "," + result_mat.m21 + "," + result_mat.m22 + "," + result_mat.m23);
            Console.WriteLine(result_mat.m30 + "," + result_mat.m31 + "," + result_mat.m32 + "," + result_mat.m33);
            Console.WriteLine(sw.ElapsedMilliseconds + "[ms]");
            return;
        }
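In real use you would branch on the boolean returned by detectMarkerLite() before reading the matrix, since the pose is only meaningful when a marker was actually found. A hedged sketch reusing the names from the test above; the getConfidence() call is an assumption and may not exist under that name in every build of the toolkit:

            //Only read the transform when a marker was detected in the frame.
            if (ar.detectMarkerLite(ra, 100))
            {
                //Optionally reject weak matches (assumed API, see note above).
                if (ar.getConfidence() > 0.5)
                {
                    ar.getTransmationMatrix(result_mat);
                }
            }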
Example #4
        public Form1()
        {
            InitializeComponent();

            NyARParam ap = NyARParam.loadFromARParamFile(File.OpenRead(AR_CAMERA_FILE), 640, 480);


            NyARCode code = NyARCode.loadFromARPattFile(File.OpenRead(AR_CODE_FILE), 16, 16);

            NyARDoubleMatrix44 result_mat = new NyARDoubleMatrix44();
            CaptureDeviceList  cl         = new CaptureDeviceList();
            CaptureDevice      cap        = cl[0];

            cap.SetCaptureListener(this);
            cap.PrepareCapture(640, 480, 30);
            this.m_cap = cap;

            this.m_raster = new DsRgbRaster(cap.video_width, cap.video_height);

            this.m_ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0);
            this.m_ar.setContinueMode(false);
        }
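The constructor only prepares the capture. A hedged sketch of the usual lifecycle around it, assuming CaptureDevice exposes StartCapture()/StopCapture() as in the toolkit's WinForms samples (an assumption, not shown above):

        private void Form1_Load(object sender, EventArgs e)
        {
            //Start streaming frames to the listener registered in the constructor.
            this.m_cap.StartCapture();
        }

        private void Form1_FormClosing(object sender, FormClosingEventArgs e)
        {
            //Release the capture device when the window closes.
            this.m_cap.StopCapture();
        }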
Example #5
        public bool InitializeApplication(Form1 topLevelForm)
        {
            topLevelForm.ClientSize = new Size(SCREEN_WIDTH, SCREEN_HEIGHT);
            this._raster            = new NyARBitmapRaster(new Bitmap(TEST_IMAGE));


            //Load and apply the AR camera parameter file
            NyARParam ap = NyARParam.loadFromARParamFile(File.OpenRead(AR_CAMERA_FILE), SCREEN_WIDTH, SCREEN_HEIGHT);

            //Load the AR pattern code
            NyARCode code = NyARCode.loadFromARPattFile(File.OpenRead(AR_CODE_FILE), 16, 16);

            //Create a class that tracks only a single pattern
            this._ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0, NyARSingleDetectMarker.PF_NYARTOOLKIT);

            //Set the computation mode
            this._ar.setContinueMode(true);

            //Prepare the 3D device
            this._device = PrepareD3dDevice(topLevelForm);
            this._device.RenderState.ZBufferEnable = true;
            this._device.RenderState.Lighting      = false;


            //Set the camera projection
            Matrix tmp = new Matrix();

            NyARD3dUtil.toCameraFrustumRH(ap, 10, 1000, ref tmp);
            this._device.Transform.Projection = tmp;

            // Set the view transform (as a left-handed view matrix)
            // Looking from 0,0,0 toward +Z, with Y as the up axis
            this._device.Transform.View = Matrix.LookAtLH(
                new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 1.0f), new Vector3(0.0f, 1.0f, 0.0f));
            Viewport vp = new Viewport();

            vp.X      = 0;
            vp.Y      = 0;
            vp.Height = ap.getScreenSize().h;
            vp.Width  = ap.getScreenSize().w;
            vp.MaxZ   = 1.0f;
            //Set the viewport
            this._device.Viewport = vp;

            //Drawing instance for the color cube
            this._cube = new ColorCube(this._device, 40);

            //Create the background surface
            this._surface = new NyARD3dSurface(this._device, SCREEN_WIDTH, SCREEN_HEIGHT);

            NyARDoubleMatrix44 nyar_transmat = this.__OnBuffer_nyar_transmat;
            //Recognize the marker
            bool is_marker_enable = this._ar.detectMarkerLite(this._raster, 110);

            if (is_marker_enable)
            {
                //If found, compute the matrix
                this._ar.getTransmationMatrix(nyar_transmat);
                NyARD3dUtil.toD3dCameraView(nyar_transmat, 1f, ref this._trans_mat);
            }
            this._is_marker_enable = is_marker_enable;
            //Copy the background to the surface
            this._surface.setRaster(this._raster);
            return(true);
        }
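A hedged sketch of the draw step this setup leads to: when the marker was found, lift the 40-unit cube by half its edge so it rests on the marker plane, apply the transform computed above, and render it. The ColorCube.draw(Device) call and the multiplication order follow the toolkit's Direct3D samples but are assumptions here:

            if (this._is_marker_enable)
            {
                //Translate by half the cube edge (40 / 2), then apply the marker transform.
                this._device.Transform.World = Matrix.Translation(0.0f, 0.0f, 20.0f) * this._trans_mat;
                this._cube.draw(this._device);
            }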
Example #6
        public bool InitializeApplication(Form1 topLevelForm, CaptureDevice i_cap_device)
        {
            topLevelForm.ClientSize = new Size(SCREEN_WIDTH, SCREEN_HEIGHT);
            //Create the capture (QVGA at a frame rate of 30)
            i_cap_device.SetCaptureListener(this);
            i_cap_device.PrepareCapture(SCREEN_WIDTH, SCREEN_HEIGHT, 30);
            this._cap = i_cap_device;

            //AR setup

            //Create the AR raster (DirectShow capture format).
            this._raster = new DsRgbRaster(i_cap_device.video_width, i_cap_device.video_height);

            //Load and apply the AR camera parameter file
            NyARParam ap = NyARParam.loadFromARParamFile(File.OpenRead(AR_CAMERA_FILE), SCREEN_WIDTH, SCREEN_HEIGHT);

            //Load the AR pattern code
            NyARCode code = NyARCode.loadFromARPattFile(File.OpenRead(AR_CODE_FILE), 16, 16);

            //Create a class that tracks only a single pattern
            this._ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0, NyARSingleDetectMarker.PF_NYARTOOLKIT);

            //Set the computation mode
            this._ar.setContinueMode(true);

            //Prepare the 3D device
            this._device = PrepareD3dDevice(topLevelForm);
            this._device.RenderState.ZBufferEnable = true;
            this._device.RenderState.Lighting      = false;


            //Set the camera projection
            Matrix tmp = new Matrix();

            NyARD3dUtil.toCameraFrustumRH(ap.getPerspectiveProjectionMatrix(), ap.getScreenSize(), 1, 10, 10000, ref tmp);
            this._device.Transform.Projection = tmp;

            // Set the view transform (as a left-handed view matrix)
            // Looking from 0,0,0 toward +Z, with Y as the up axis
            this._device.Transform.View = Matrix.LookAtLH(
                new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 1.0f), new Vector3(0.0f, 1.0f, 0.0f));
            Viewport vp = new Viewport();

            vp.X      = 0;
            vp.Y      = 0;
            vp.Height = ap.getScreenSize().h;
            vp.Width  = ap.getScreenSize().w;
            vp.MaxZ   = 1.0f;
            //Set the viewport
            this._device.Viewport = vp;

            //Drawing instance for the color cube
            this._cube = new ColorCube(this._device, 40);


            //Create the background surface
            this._surface = new NyARD3dSurface(this._device, SCREEN_WIDTH, SCREEN_HEIGHT);

            this._is_marker_enable = false;
            return(true);
        }
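A hedged sketch of what this setup does once a new frame has been written into this._raster (for example from a capture callback like the one sketched after Example #1): detect the marker, convert the pose with toD3dCameraView exactly as Example #5 does, and hand the frame to the background surface. The helper name UpdateFromCurrentFrame is hypothetical:

        private void UpdateFromCurrentFrame()
        {
            //Detect the single marker with a binarization threshold of 110.
            this._is_marker_enable = this._ar.detectMarkerLite(this._raster, 110);
            if (this._is_marker_enable)
            {
                //Convert the NyAR transform into a Direct3D matrix.
                NyARDoubleMatrix44 nyar_transmat = new NyARDoubleMatrix44();
                this._ar.getTransmationMatrix(nyar_transmat);
                NyARD3dUtil.toD3dCameraView(nyar_transmat, 1f, ref this._trans_mat);
            }
            //Copy the camera image to the background surface.
            this._surface.setRaster(this._raster);
        }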
Example #7
        public bool InitializeApplication(Form1 topLevelForm, CaptureDevice i_cap_device)
        {
            topLevelForm.ClientSize = new Size(SCREEN_WIDTH, SCREEN_HEIGHT);

            i_cap_device.SetCaptureListener(this);
            i_cap_device.PrepareCapture(SCREEN_WIDTH, SCREEN_HEIGHT, 30);
            this._cap = i_cap_device;



            //this._raster = new DsRgbRaster(i_cap_device.video_width, i_cap_device.video_height,NyARBufferType.BYTE1D_B8G8R8X8_32);

            #region my code
            try
            {
                //Fetch one frame as a byte array from the external service and convert it.
                byte[] bimg = service.getb();
                Image  img  = byteToImage(bimg);
                if (img != null)
                {
                    this._raster = new NyARBitmapRaster((Bitmap)img);
                }
            }
            catch (Exception x)
            {
                //Conversion errors are ignored; the raster simply stays unset for this frame.
                //MessageBox.Show(x.ToString());
            }
            #endregion


            NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(AR_CAMERA_FILE));
            ap.changeScreenSize(SCREEN_WIDTH, SCREEN_HEIGHT);


            NyARCode code = NyARCode.createFromARPattFile(new StreamReader(AR_CODE_FILE), 16, 16);


            this._ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0, NyARSingleDetectMarker.PF_NYARTOOLKIT);


            this._ar.setContinueMode(true);


            this._device = PrepareD3dDevice(topLevelForm);
            this._device.RenderState.ZBufferEnable = true;
            this._device.RenderState.Lighting      = false;



            Matrix tmp = new Matrix();
            NyARD3dUtil.toCameraFrustumRH(ap.getPerspectiveProjectionMatrix(), ap.getScreenSize(), 1, 10, 10000, ref tmp);
            this._device.Transform.Projection = tmp;


            this._device.Transform.View = Matrix.LookAtLH(
                new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 1.0f), new Vector3(0.0f, 1.0f, 0.0f));
            Viewport vp = new Viewport();
            vp.X      = 0;
            vp.Y      = 0;
            vp.Height = ap.getScreenSize().h;
            vp.Width  = ap.getScreenSize().w;
            vp.MaxZ   = 1.0f;

            this._device.Viewport = vp;


            this._cube = new ColorCube(this._device, 40);



            this._surface = new NyARD3dSurface(this._device, SCREEN_WIDTH, SCREEN_HEIGHT);

            this._is_marker_enable = false;
            return(true);
        }
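The code above calls byteToImage() without showing it. A minimal sketch of such a helper, assuming the service returns an encoded image (PNG/JPEG) rather than raw pixel data (an assumption; adjust if the payload is raw):

        private static Image byteToImage(byte[] i_bytes)
        {
            if (i_bytes == null || i_bytes.Length == 0)
            {
                return null;
            }
            using (MemoryStream ms = new MemoryStream(i_bytes))
            using (Image decoded = Image.FromStream(ms))
            {
                //Copy into a new Bitmap so the stream and the decoded image can be disposed safely.
                return new Bitmap(decoded);
            }
        }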