public MainWindow()
        {
            InitializeComponent();

            CaptureDeviceList cl = new CaptureDeviceList();

            m_cap = cl[0];
            m_cap.SetCaptureListener(this);
            m_cap.PrepareCapture(cameraResX, cameraResY, 30); // 800x600 resolution, 30 fps

            NyARParam ap = new NyARParam();

            ap.loadARParamFromFile(AR_CAMERA_FILE);
            ap.changeScreenSize(cameraResX, cameraResY);

            this.m_raster = new NyARRgbRaster(m_cap.video_width, m_cap.video_height, NyARBufferType.BYTE1D_R8G8B8_24,
                                              false);

            NyARCode code = new NyARCode(16, 16); // 16x16 pattern resolution inside the marker border

            code.loadARPattFromFile(AR_CODE_FILE);
            this.m_ar = new NyARDetectMarker(ap, new NyARCode[] { code }, new double[] { 80.0 }, 1,
                                             this.m_raster.getBufferType());
            this.m_ar.setContinueMode(false);

            this.Loaded += MainWindow_Loaded;
        }
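
The constructor registers this window as the capture listener, so each camera frame has to be pushed into m_raster and run through m_ar. Below is a minimal sketch of that per-frame step, assuming the frame has already been copied into m_raster; it only uses the NyARDetectMarker calls that appear in the later examples (detectMarkerLite, getARCodeIndex, getConfidence, getTransmationMatrix), and the threshold value 100 and the helper name are illustrative.

        // Hypothetical per-frame step; assumes the capture callback has already copied the
        // current frame into m_raster. Threshold 100 and the method name are illustrative.
        private void DetectMarkers()
        {
            NyARDoubleMatrix44 trans = new NyARDoubleMatrix44();

            // detectMarkerLite returns the number of markers found in the raster.
            int found = this.m_ar.detectMarkerLite(this.m_raster, 100);
            for (int i = 0; i < found; i++)
            {
                int    codeIndex  = this.m_ar.getARCodeIndex(i);
                double confidence = this.m_ar.getConfidence(i);
                this.m_ar.getTransmationMatrix(i, trans);
                // codeIndex, confidence and trans describe marker i's identity, match quality and pose.
            }
        }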
Example #2
        /// <summary>
        /// Initializes the detector for single marker detection.
        /// </summary>
        /// <param name="width">The width of the buffer that will be used for detection.</param>
        /// <param name="height">The height of the buffer that will be used for detection.</param>
        /// <param name="nearPlane">The near view plane of the frustum.</param>
        /// <param name="farPlane">The far view plane of the frustum.</param>
        /// <param name="markers">A list of markers that should be detected.</param>
        /// <param name="bufferType">The type of the buffer.</param>
        /// <param name="adaptive">Performs an adaptive bitmap thresholding if set to true. Default = false.</param>
        protected void Initialize(int width, int height, double nearPlane, double farPlane, IList<Marker> markers, int bufferType, bool adaptive = false)
        {
            // Check arguments
            if (markers == null)
            {
                throw new ArgumentNullException("markers");
            }
            if (!markers.Any())
            {
                throw new ArgumentException("At least one marker must be supplied.", "markers");
            }

            // Member init
            this.bufferWidth  = width;
            this.bufferHeight = height;
            this.isAdaptive   = adaptive;

            // Init pattern matchers with markers and check the segment size, which has to be equal for all markers
            int segmentX        = markers[0].SegmentsX;
            int segmentY        = markers[0].SegmentsY;
            var patternMatchers = new List<PatternMatcher>(markers.Count);

            foreach (var marker in markers)
            {
                if (marker.SegmentsX != segmentX || marker.SegmentsY != segmentY)
                {
                    throw new ArgumentException("The Segment size has to be equal for all markers. Don't mix 16x16 and 32x32 markers for example.", "markers");
                }
                patternMatchers.Add(new PatternMatcher(marker));
            }

            // Load default camera calibration data
            var asmName = new System.Reflection.AssemblyName(System.Reflection.Assembly.GetExecutingAssembly().FullName).Name;

            var streamResInfoCam = Application.GetResourceStream(new Uri(asmName + ";component/data/Camera_Calibration_Default.dat", UriKind.Relative));
            var cameraParameters = new NyARParam();

            using (var cameraCalibrationDataStream = streamResInfoCam.Stream)
            {
                cameraParameters.loadARParam(cameraCalibrationDataStream);
                cameraParameters.changeScreenSize(width, height);
            }

            // Get projection matrix from camera calibration data
            this.Projection = cameraParameters.GetCameraFrustumRH(nearPlane, farPlane);

            // Init detector and necessary data
            var colorPattern = new NyARColorPatt_Perspective_O2(segmentX, segmentY, 4, 25);
            var patternMatchDeviationData = new NyARMatchPattDeviationColorData(segmentX, segmentY);

            this.squareDetector          = new NyARSquareContourDetector_Rle(cameraParameters.getScreenSize());
            this.squareDetectionListener = new SquareDetectionListener(patternMatchers, cameraParameters, colorPattern, patternMatchDeviationData);

            // Init buffer members
            this.filteredBuffer = new NyARBinRaster(width, height);
            if (adaptive)
            {
                this.bufferFilter = new NyARRasterFilter_AdaptiveThreshold(bufferType);
            }
            else
            {
                this.bufferFilter = new NyARRasterFilter_ARToolkitThreshold(this.Threshold, bufferType);
            }
        }
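
Since Initialize is protected, it is meant to be called from a class that derives from the type defining it. The sketch below shows one such call, using only the signature declared above; the LoadMarker helper and the marker pattern path are hypothetical, and the 640x480 buffer size, 10/10000 frustum planes, and buffer type constant are illustrative values taken from the other examples.

        // Hypothetical call site inside a deriving class; LoadMarker and the pattern path are
        // illustrative and not part of the library.
        protected void InitializeDetector()
        {
            IList<Marker> markers = new List<Marker> { LoadMarker("data/patt.sample") };

            // 640x480 buffer, frustum from 10 to 10000, plain RGB byte buffer as in the other
            // examples, simple (non-adaptive) thresholding.
            this.Initialize(640, 480, 10, 10000, markers, NyARBufferType.BYTE1D_R8G8B8_24, adaptive: false);
        }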
Example #3
        public void Test()
        {
            // Load the camera parameter file for AR
            NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(camera_file));

            ap.changeScreenSize(320, 240);

            // Read the test image (320x240 RGB raw data)
            StreamReader sr = new StreamReader(data_file);
            BinaryReader bs = new BinaryReader(sr.BaseStream);

            byte[]        raw = bs.ReadBytes(320 * 240 * 3);
            NyARRgbRaster ra  = new NyARRgbRaster(320, 240, NyARBufferType.BYTE1D_R8G8B8_24, false);

            ra.wrapBuffer(raw);

            MarkerProcessor pr = new MarkerProcessor(ap, ra.getBufferType());

            pr.detectMarker(ra);
            Console.WriteLine(pr.transmat.m00 + "," + pr.transmat.m01 + "," + pr.transmat.m02 + "," + pr.transmat.m03);
            Console.WriteLine(pr.transmat.m10 + "," + pr.transmat.m11 + "," + pr.transmat.m12 + "," + pr.transmat.m13);
            Console.WriteLine(pr.transmat.m20 + "," + pr.transmat.m21 + "," + pr.transmat.m22 + "," + pr.transmat.m23);
            Console.WriteLine(pr.transmat.m30 + "," + pr.transmat.m31 + "," + pr.transmat.m32 + "," + pr.transmat.m33);
            Console.WriteLine(pr.current_id);
            return;
        }
Example #4
        public Form1()
        {
            InitializeComponent();
            // AR setup
            // Load the camera parameter file for AR
            NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(AR_CAMERA_FILE));

            ap.changeScreenSize(320, 240);

            // Load the AR pattern code
            NyARCode code = NyARCode.createFromARPattFile(new StreamReader(AR_CODE_FILE), 16, 16);

            NyARDoubleMatrix44 result_mat = new NyARDoubleMatrix44();
            // Set the computation mode
            // Create the capture device

            /**************************************************
            *  This code is set up to use device 0 (the first capture device found).
            *  On systems with multiple capture devices it may not work as expected.
            *  To use the n-th device, change the 0 in CaptureDevice cap=cl[0];
            *  a short sketch of selecting a different index follows this constructor.
            *  For letting the user choose a device manually, see SimpleLiteDirect3D.
            **************************************************/
            CaptureDeviceList cl  = new CaptureDeviceList();
            CaptureDevice     cap = cl[0];

            cap.SetCaptureListener(this);
            cap.PrepareCapture(320, 240, 30);
            this.m_cap = cap;
            // Create the raster.
            this.m_raster = new DsRgbRaster(cap.video_width, cap.video_height, NyARBufferType.OBJECT_CS_Bitmap);
            // Create a detector class that tracks a single pattern
            this.m_ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0);
            this.m_ar.setContinueMode(false);
        }
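
        // Sketch (in comment form, since this sits inside the same class): as the note in the
        // constructor above says, device 0 is always used. The hypothetical helper below picks
        // the n-th capture device instead; deviceIndex is assumed to be a valid index on the
        // machine, and the 320x240 / 30 fps values mirror the constructor.
        private CaptureDevice OpenCaptureDevice(int deviceIndex)
        {
            CaptureDeviceList cl  = new CaptureDeviceList();
            CaptureDevice     cap = cl[deviceIndex];

            cap.SetCaptureListener(this);
            cap.PrepareCapture(320, 240, 30);
            return cap;
        }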
        public bool InitializeApplication(Form1 topLevelForm, CaptureDevice i_cap_device)
        {
            topLevelForm.ClientSize = new Size(SCREEN_WIDTH, SCREEN_HEIGHT);
            // Set up capture (QVGA at 30 fps)
            i_cap_device.SetCaptureListener(this);
            i_cap_device.PrepareCapture(SCREEN_WIDTH, SCREEN_HEIGHT, 30);
            this._cap = i_cap_device;

            // Load and apply the camera parameter file for AR
            NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(AR_CAMERA_FILE));

            ap.changeScreenSize(SCREEN_WIDTH, SCREEN_HEIGHT);

            // Build the marker library (NyId)
            this._mklib = new RawbitSerialIdTable(10);
            // Create the marker size table (all markers 8 cm for now)
            this._mklib.addAnyItem("any id", 80);

            // Prepare the Reality objects
            this._reality        = new NyARRealityD3d(ap, 10, 10000, 2, 10);
            this._reality_source = new NyARRealitySource_DShow(SCREEN_WIDTH, SCREEN_HEIGHT, null, 2, 100);

            // Prepare the 3D device
            this._device = PrepareD3dDevice(topLevelForm);
            this._device.RenderState.ZBufferEnable = true;
            this._device.RenderState.Lighting      = false;



            // Set up the camera projection
            Matrix tmp = new Matrix();

            this._reality.getD3dCameraFrustum(ref tmp);
            this._device.Transform.Projection = tmp;

            // Set up the view transform (a left-handed view matrix):
            // looking from 0,0,0 toward +Z, with the Y axis pointing up
            this._device.Transform.View = Matrix.LookAtLH(
                new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 1.0f), new Vector3(0.0f, 1.0f, 0.0f));
            Viewport vp = new Viewport();

            vp.Y      = 0;
            vp.Height = ap.getScreenSize().h;
            vp.Width  = ap.getScreenSize().w;
            vp.MaxZ   = 1.0f;
            // Apply the viewport
            this._device.Viewport = vp;

            // Instance that draws the color cube
            this._cube = new ColorCube(this._device, 40);
            // Create the background surface
            this._surface = new NyARD3dSurface(this._device, SCREEN_WIDTH, SCREEN_HEIGHT);

            return(true);
        }
Example #6
        public void Test()
        {
            // Load the camera parameter file for AR
            NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(camera_file));

            ap.changeScreenSize(320, 240);

            // Load the AR pattern code
            NyARCode code = NyARCode.createFromARPattFile(new StreamReader(code_file), 16, 16);

            // Read the test image (320x240 BGRA raw data)
            StreamReader sr = new StreamReader(data_file);
            BinaryReader bs = new BinaryReader(sr.BaseStream);

            byte[] raw = bs.ReadBytes(320 * 240 * 4);

//            NyARBitmapRaster ra = new NyARBitmapRaster(320, 240);
//            Graphics g = Graphics.FromImage(ra.getBitmap());
//            g.DrawImage(new Bitmap("../../../../../data/320x240ABGR.png"), 0, 0);


            NyARRgbRaster ra = new NyARRgbRaster(320, 240, NyARBufferType.BYTE1D_B8G8R8X8_32, false);

            ra.wrapBuffer(raw);

            // Create a detector class that tracks a single pattern
            NyARSingleDetectMarker ar         = NyARSingleDetectMarker.createInstance(ap, code, 80.0, NyARSingleDetectMarker.PF_NYARTOOLKIT);
            NyARDoubleMatrix44     result_mat = new NyARDoubleMatrix44();

            ar.setContinueMode(false);
            ar.detectMarkerLite(ra, 100);
            ar.getTransmationMatrix(result_mat);

            // Detect the marker repeatedly and time it
            Stopwatch sw = new Stopwatch();

            sw.Start();
            for (int i = 0; i < 1000; i++)
            {
                // Get the transformation matrix
                ar.detectMarkerLite(ra, 100);
                ar.getTransmationMatrix(result_mat);
            }
            Console.WriteLine(result_mat.m00 + "," + result_mat.m01 + "," + result_mat.m02 + "," + result_mat.m03);
            Console.WriteLine(result_mat.m10 + "," + result_mat.m11 + "," + result_mat.m12 + "," + result_mat.m13);
            Console.WriteLine(result_mat.m20 + "," + result_mat.m21 + "," + result_mat.m22 + "," + result_mat.m23);
            Console.WriteLine(result_mat.m30 + "," + result_mat.m31 + "," + result_mat.m32 + "," + result_mat.m33);
            sw.Stop();
            Console.WriteLine(sw.ElapsedMilliseconds + "[ms]");
            return;
        }
Example #7
        public void Test_arDetectMarkerLite()
        {
            Assembly assembly = Assembly.GetExecutingAssembly();

            // Load the camera parameter file for AR
            NyARParam ap = new NyARParam();

            ap.loadARParam(assembly.GetManifestResourceStream(RES_CAMERA));
            ap.changeScreenSize(320, 240);

            // Load the AR pattern code
            NyARCode code = new NyARCode(16, 16);
            Stream   sr1  = assembly.GetManifestResourceStream(RES_PATT);

            code.loadARPatt(new StreamReader(sr1));

            // Read the test image (320x240 BGRA raw data)
            StreamReader sr = new StreamReader(assembly.GetManifestResourceStream(RES_DATA));
            BinaryReader bs = new BinaryReader(sr.BaseStream);

            byte[]             raw = bs.ReadBytes(320 * 240 * 4);
            NyARRgbRaster_BGRA ra  = new NyARRgbRaster_BGRA(320, 240, false);

            ra.wrapBuffer(raw);
            //		Blank_Raster ra=new Blank_Raster(320, 240);

            // Create a detector class that tracks a single pattern
//            NyARSingleDetectMarker_Quad ar = new NyARSingleDetectMarker_Quad(ap, code, 80.0);
            NyARSingleDetectMarker ar         = new NyARSingleDetectMarker(ap, code, 80.0, ra.getBufferType());
            NyARTransMatResult     result_mat = new NyARTransMatResult();

            ar.setContinueMode(false);
            ar.detectMarkerLite(ra, 100);
            ar.getTransmationMatrix(result_mat);

            // Detect the marker repeatedly and time it
            for (int i3 = 0; i3 < 10; i3++)
            {
                Stopwatch sw = new Stopwatch();
                sw.Start();
                for (int i = 0; i < 10; i++)
                {
                    // Get the transformation matrix
                    ar.detectMarkerLite(ra, 100);
                    ar.getTransmationMatrix(result_mat);
                }
                sw.Stop();
                Debug.WriteLine(sw.ElapsedMilliseconds + "[ms]");
            }
            return;
        }
Example #8
        public void Test()
        {
            try {
                NyARParam param = NyARParam.createFromARParamFile(new StreamReader(PARAM_FILE));
                param.changeScreenSize(320, 240);
                NyARReality       reality    = new NyARReality(param.getScreenSize(), 10, 1000, param.getPerspectiveProjectionMatrix(), null, 10, 10);
                NyARRealitySource reality_in = new NyARRealitySource_Reference(320, 240, null, 2, 100, NyARBufferType.BYTE1D_B8G8R8X8_32);

                // Read the test image (320x240 BGRA raw data)
                StreamReader sr  = new StreamReader(DATA_FILE);
                BinaryReader bs  = new BinaryReader(sr.BaseStream);
                byte[]       raw = bs.ReadBytes(320 * 240 * 4);
                Array.Copy(raw, (byte[])reality_in.refRgbSource().getBuffer(), raw.Length);


                Stopwatch sw = new Stopwatch();
                sw.Start();
                for (int i = 0; i < 1000; i++)
                {
                    reality.progress(reality_in);
                }
                sw.Stop();
                Console.WriteLine(sw.ElapsedMilliseconds + "[ms]");
                Console.WriteLine(reality.getNumberOfKnown());
                Console.WriteLine(reality.getNumberOfUnknown());
                Console.WriteLine(reality.getNumberOfDead());
                NyARRealityTarget[] rt = new NyARRealityTarget[10];
                reality.selectUnKnownTargets(rt);
                reality.changeTargetToKnown(rt[0], 2, 80);
                Console.WriteLine(rt[0]._transform_matrix.m00 + "," + rt[0]._transform_matrix.m01 + "," + rt[0]._transform_matrix.m02 + "," + rt[0]._transform_matrix.m03);
                Console.WriteLine(rt[0]._transform_matrix.m10 + "," + rt[0]._transform_matrix.m11 + "," + rt[0]._transform_matrix.m12 + "," + rt[0]._transform_matrix.m13);
                Console.WriteLine(rt[0]._transform_matrix.m20 + "," + rt[0]._transform_matrix.m21 + "," + rt[0]._transform_matrix.m22 + "," + rt[0]._transform_matrix.m23);
                Console.WriteLine(rt[0]._transform_matrix.m30 + "," + rt[0]._transform_matrix.m31 + "," + rt[0]._transform_matrix.m32 + "," + rt[0]._transform_matrix.m33);
            } catch (Exception e) {
                Console.WriteLine(e.StackTrace);
            }
            return;
        }
        public NyARWordsBingoCore(Label lbl, Game currentGame, PictureBox pbx)
        {
            // Store the PictureBox, label, and current game received from the form
            pbxNyAR = pbx;
            lbNyAR  = lbl;
            gmNyAR  = currentGame;


            // AR parameters
            NyARParam ap = new NyARParam();

            ap.loadARParamFromFile(AR_CAMERA_FILE);   // Read the .dat calibration file
            ap.changeScreenSize(640, 480);            // Set the camera size.

            // Read the marker patterns
            NyARCode codeKanji = new NyARCode(16, 16);
            NyARCode codeA     = new NyARCode(16, 16);
            NyARCode codeB     = new NyARCode(16, 16);
            NyARCode codeC     = new NyARCode(16, 16);
            NyARCode codeD     = new NyARCode(16, 16);
            NyARCode codeE     = new NyARCode(16, 16);
            NyARCode codeF     = new NyARCode(16, 16);
            NyARCode codeG     = new NyARCode(16, 16);
            NyARCode codeH     = new NyARCode(16, 16);
            NyARCode codeI     = new NyARCode(16, 16);
            NyARCode codeJ     = new NyARCode(16, 16);
            NyARCode codeK     = new NyARCode(16, 16);
            NyARCode codeL     = new NyARCode(16, 16);
            NyARCode codeM     = new NyARCode(16, 16);
            NyARCode codeN     = new NyARCode(16, 16);
            NyARCode codeO     = new NyARCode(16, 16);
            NyARCode codeP     = new NyARCode(16, 16);
            NyARCode codeQ     = new NyARCode(16, 16);
            NyARCode codeR     = new NyARCode(16, 16);
            NyARCode codeS     = new NyARCode(16, 16);
            NyARCode codeT     = new NyARCode(16, 16);
            NyARCode codeU     = new NyARCode(16, 16);
            NyARCode codeV     = new NyARCode(16, 16);
            NyARCode codeW     = new NyARCode(16, 16);
            NyARCode codeX     = new NyARCode(16, 16);
            NyARCode codeY     = new NyARCode(16, 16);
            NyARCode codeZ     = new NyARCode(16, 16);

            NyARCode codeBall      = new NyARCode(16, 16);
            NyARCode codeSword     = new NyARCode(16, 16);
            NyARCode codeCar       = new NyARCode(16, 16);
            NyARCode codeBrigadier = new NyARCode(16, 16);
            NyARCode codeFork      = new NyARCode(16, 16);
            NyARCode codePen       = new NyARCode(16, 16);
            NyARCode codeFish      = new NyARCode(16, 16);


            // Load the marker pattern files into the NyARCode objects
            // (a more compact, loop-based alternative is sketched after this constructor)
            codeKanji.loadARPattFromFile(AR_CODE_FILE_KANJI);
            codeA.loadARPattFromFile(AR_CODE_FILE_A);
            codeB.loadARPattFromFile(AR_CODE_FILE_B);
            codeC.loadARPattFromFile(AR_CODE_FILE_C);
            codeD.loadARPattFromFile(AR_CODE_FILE_D);
            codeE.loadARPattFromFile(AR_CODE_FILE_E);
            codeF.loadARPattFromFile(AR_CODE_FILE_F);
            codeG.loadARPattFromFile(AR_CODE_FILE_G);
            codeH.loadARPattFromFile(AR_CODE_FILE_H);
            codeI.loadARPattFromFile(AR_CODE_FILE_I);
            codeJ.loadARPattFromFile(AR_CODE_FILE_J);
            codeK.loadARPattFromFile(AR_CODE_FILE_K);
            codeL.loadARPattFromFile(AR_CODE_FILE_L);
            codeM.loadARPattFromFile(AR_CODE_FILE_M);
            codeN.loadARPattFromFile(AR_CODE_FILE_N);
            codeO.loadARPattFromFile(AR_CODE_FILE_O);
            codeP.loadARPattFromFile(AR_CODE_FILE_P);
            codeQ.loadARPattFromFile(AR_CODE_FILE_Q);
            codeR.loadARPattFromFile(AR_CODE_FILE_R);
            codeS.loadARPattFromFile(AR_CODE_FILE_S);
            codeT.loadARPattFromFile(AR_CODE_FILE_T);
            codeU.loadARPattFromFile(AR_CODE_FILE_U);
            codeV.loadARPattFromFile(AR_CODE_FILE_V);
            codeW.loadARPattFromFile(AR_CODE_FILE_W);
            codeX.loadARPattFromFile(AR_CODE_FILE_X);
            codeY.loadARPattFromFile(AR_CODE_FILE_Y);
            codeZ.loadARPattFromFile(AR_CODE_FILE_Z);

            codeBall.loadARPattFromFile(AR_CODE_FILE_BALL);
            codeSword.loadARPattFromFile(AR_CODE_FILE_SWORD);
            codeCar.loadARPattFromFile(AR_CODE_FILE_CAR);
            codeBrigadier.loadARPattFromFile(AR_CODE_FILE_BRIGADIER);
            codeFork.loadARPattFromFile(AR_CODE_FILE_FORK);
            codePen.loadARPattFromFile(AR_CODE_FILE_PEN);
            codeFish.loadARPattFromFile(AR_CODE_FILE_FISH);



            double[] WidthArray = { 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0,
                                    80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0,
                                    80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0 };

            NyARCode[] arrayCodes = { codeKanji, codeA, codeB, codeC, codeD, codeE, codeF, codeG, codeH, codeI,
                                      codeJ,     codeK, codeL, codeM, codeN, codeO, codeP, codeQ, codeR, codeS,
                                      codeT,     codeU, codeV, codeW, codeX, codeY, codeZ };

            double[]   WidthArrayImages = { 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0 };
            NyARCode[] arrayCodesImage  = { codeBall, codeSword, codeCar, codeBrigadier, codeFork, codePen, codeFish };


            // Set camera properties
            try
            {
                Core.cap.SetCaptureListener(this);
            }
            catch (Exception e)
            {
                MessageBox.Show("Ocorreu um erro no estabelecimento da conexão com a câmera. Verifique se ela está instalada corretamente. Favor, reinicie a aplicação.", "Erro!");
                Environment.Exit(0);
            }

            Core.cap.PrepareCapture(640, 480, 30);


            // Create the raster, passing the video parameters reported by the CaptureDevice
            Core.raster = Core.InstanceRaster(Core.cap.video_width, Core.cap.video_height);


            // Create the single-marker detectors
            this.ballMarkerDetector = new NyARSingleDetectMarker(ap, codeBall, 80.0, Core.raster.getBufferType());
            this.ballMarkerDetector.setContinueMode(true);

            this.brigadierlMarkerDetector = new NyARSingleDetectMarker(ap, codeBrigadier, 80.0, Core.raster.getBufferType());
            this.brigadierlMarkerDetector.setContinueMode(true);

            this.carMarkerDetector = new NyARSingleDetectMarker(ap, codeCar, 80.0, Core.raster.getBufferType());
            this.carMarkerDetector.setContinueMode(true);

            this.forkMarkerDetector = new NyARSingleDetectMarker(ap, codeFork, 80.0, Core.raster.getBufferType());
            this.forkMarkerDetector.setContinueMode(true);

            this.penMarkerDetector = new NyARSingleDetectMarker(ap, codePen, 80.0, Core.raster.getBufferType());
            this.penMarkerDetector.setContinueMode(true);

            this.swordMarkerDetector = new NyARSingleDetectMarker(ap, codeSword, 80.0, Core.raster.getBufferType());
            this.swordMarkerDetector.setContinueMode(true);

            this.fishMarkerDetector = new NyARSingleDetectMarker(ap, codeFish, 80.0, Core.raster.getBufferType());
            this.fishMarkerDetector.setContinueMode(true);



            // Create the detector for all the letters
            this.markerDetectorLetters = new NyARDetectMarker(ap, arrayCodes, WidthArray, 27, Core.raster.getBufferType());
            this.markerDetectorLetters.setContinueMode(true);

            // Create the detector for the images that represent the word on the card
            this.markerDetectorImages = new NyARDetectMarker(ap, arrayCodesImage, WidthArrayImages, 7, Core.raster.getBufferType());
            this.markerDetectorImages.setContinueMode(true);


            // Start capturing
            try
            {
                Core.cap.StartCapture();
            }
            catch (Exception e)
            {
                MessageBox.Show("Ocorreu um erro no estabelecimento da conexão com a câmera. Verifique se ela está instalada corretamente.");
                Environment.Exit(0);
            }


            // Below are the functions that prepare the device and scene for 3D rendering.
            // More details of how this works still need to be investigated.

            //Preparing the 3D device
            this._device = Core.PrepareD3dDevice(pbx); // Prepare the device on top of the PictureBox
            this._device.RenderState.ZBufferEnable = true;
            this._device.RenderState.Lighting      = false;

            // Camera projection settings
            Matrix tmp = new Matrix();

            NyARD3dUtil.toCameraFrustumRH(ap.getPerspectiveProjectionMatrix(), ap.getScreenSize(), 1, 10, 10000, ref tmp);
            this._device.Transform.Projection = tmp;

            // Set up the view transform (a left-handed view matrix):
            // looking from 0,0,0 toward +Z, with the Y axis pointing up
            this._device.Transform.View = Matrix.LookAtLH(
                new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 1.0f), new Vector3(0.0f, 1.0f, 0.0f));
            Viewport vp = new Viewport();

            vp.X      = 0;
            vp.Y      = 0;
            vp.Height = ap.getScreenSize().h;
            vp.Width  = ap.getScreenSize().w;
            vp.MaxZ   = 1.0f;
            //Set viewport
            this._device.Viewport = vp;

            // Draw a color cube instance (a colored cube, standard in the NyAR library)
            this._cube = new ColorCube(this._device, 40);

            //Create a background surface
            this._surface = new NyARSurface_XRGB32(this._device, 640, 480);

            // Load the meshes (3D objects)
            LoadMeshes();
        }
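
The per-letter pattern loading above is fully unrolled. Below is a more compact sketch of the same loading, under the assumption that the pattern file paths can be gathered into an array; the patternFiles array is illustrative and would need to hold the same AR_CODE_FILE_* constants in the same order as the unrolled code.

            // Illustrative alternative to the unrolled pattern loading above; patternFiles is
            // assumed to list the same AR_CODE_FILE_* constants in the same order.
            string[] patternFiles = { AR_CODE_FILE_KANJI, AR_CODE_FILE_A, /* ..., */ AR_CODE_FILE_Z };

            NyARCode[] letterCodes  = new NyARCode[patternFiles.Length];
            double[]   letterWidths = new double[patternFiles.Length];
            for (int i = 0; i < patternFiles.Length; i++)
            {
                letterCodes[i] = new NyARCode(16, 16);
                letterCodes[i].loadARPattFromFile(patternFiles[i]);
                letterWidths[i] = 80.0; // every marker is 80 mm wide
            }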
Example #10
        public void AR_Initialization(Control topLevelForm)
        {
            //Camera parameters
            NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(AR_CAMERA_FILE));

            ap.changeScreenSize(SCREEN_WIDTH, SCREEN_HEIGHT);

            //List of patterns
            ar_code = new NyARCode[Constante.PATT_MAX];

            //Pattern files
            AR_CODE_FILES = new String[Constante.PATT_MAX];

            //Pattern index list
            ar_code_index = new int[Constante.PATT_MAX];

            AR_CODE_FILES[0] = "data/patt_U.dat";
            AR_CODE_FILES[1] = "data/patt_S.dat";
            AR_CODE_FILES[2] = "data/patt_T.dat";
            AR_CODE_FILES[3] = "data/patt_H.dat";
            AR_CODE_FILES[4] = "data/patt_B.dat";


            for (int count = 0; count < Constante.PATT_MAX; count++)
            {
                ar_code[count] = NyARCode.createFromARPattFile(new StreamReader(AR_CODE_FILES[count]), 16, 16);
            }

            marker_width = new double[Constante.PATT_MAX];
            for (int count = 0; count < Constante.PATT_MAX; count++)
            {
                marker_width[count] = 80.0f;
            }


            this._ar = new NyARDetectMarker(ap, ar_code, marker_width, Constante.PATT_MAX);

            this._ar.setContinueMode(true);

            Matrix tmp = new Matrix();

            NyARD3dUtil.toCameraFrustumRH(ap, 1, 2000, ref tmp);
            m_Device.Transform.Projection = tmp;

            // View Matrix:
            Vector3 camera_position = new Vector3(0, 0, -500);

            camera_position.Normalize();
            //camera_position.Multiply(m_Range);

            m_Device.Transform.View = Matrix.LookAtLH(
                camera_position, new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 1.0f, 0.0f));
            //m_Device.Transform.Projection = Matrix.PerspectiveFovLH((float)Math.PI / 4, SCREEN_WIDTH / SCREEN_HEIGHT, 1, 2000);
            Viewport vp = new Viewport();

            vp.X = 0;
            vp.Y = 0;
            //vp.Height = ap.getScreenSize().h;
            //vp.Width = ap.getScreenSize().w;
            vp.Height = SCREEN_HEIGHT;
            vp.Width  = SCREEN_WIDTH;
            vp.MaxZ   = 1.0f;

            m_Device.Viewport = vp;

            this._surface = new NyARD3dSurface(m_Device, SCREEN_WIDTH, SCREEN_HEIGHT);

            NyARDoubleMatrix44 nyar_transmat = this.__OnBuffer_nyar_transmat;

            int nb_marker_detected = 0;

            nb_marker_detected = this._ar.detectMarkerLite(this._raster, Constante.binarisation);

            if (nb_marker_detected > 0)
            {
                bestConfidence = 0;
                bestMarker     = 0;

                // If more markers were detected than the maximum supported, clamp to MARK_MAX
                if (nb_marker_detected > Constante.MARK_MAX)
                {
                    nb_marker_detected = Constante.MARK_MAX;
                }

                // Find the detected marker with the best confidence
                for (int count = 0; count < nb_marker_detected; count++)
                {
                    ar_code_index[count] = this._ar.getARCodeIndex(count);
                    if (this._ar.getConfidence(count) > bestConfidence)
                    {
                        bestConfidence = this._ar.getConfidence(count);
                        bestMarker     = count;
                    }
                    //textBox1.Text += "bestConfidence: " + bestConfidence + "  bestMarker: " + bestMarker+"\n";
                }

                //textBox1.Text += "finally:\nbestConfidence: " + bestConfidence + "  bestMarker: " + bestMarker+"\n\n";

                try
                {
                    //MessageBox.Show("bestMarker: " + bestMarker, "RenderForm.AR_Initialization");
                    this._ar.getTransmationMatrix(bestMarker, nyar_transmat);
                }
                catch (Exception x)
                { //MessageBox.Show(x.ToString(), "RenderForm.AR_Initialize");
                }
                NyARD3dUtil.toD3dCameraView(nyar_transmat, 1f, ref this._trans_mat);
            }

            this._nb_marker_detected = nb_marker_detected;

            {
                try
                {
                    this._surface.setRaster(this._raster);
                }
                catch (Exception x)
                {
                    //MessageBox.Show(x.ToString(), "RenderForm.AR_Initialization");
                }
            }
        }
        public void InitTracker(params object[] configs)
        {
            if (!(configs.Length == 3 || configs.Length == 5))
            {
                throw new MarkerException("Problem in InitTracker in NewNyAR");
            }

            int imgWidth = 0;
            int imgHeight = 0;
            try
            {
                imgWidth = (int)configs[0];
                imgHeight = (int)configs[1];
                configFilename = (String)configs[2];
                if (configs.Length == 5)
                {
                    threshold = (int)configs[3];
                    continuousMode = (bool)configs[4];
                }
                else
                {
                    threshold = 100;
                    continuousMode = false;
                }
            }
            catch (Exception)
            {
                throw new MarkerException("Problem in InitTracker in NewNyAR");
            }

            nyARIntSize = new NyARIntSize(imgWidth, imgHeight);

            param = new NyARParam();
            param.loadARParam(TitleContainer.OpenStream(configFilename));
            param.changeScreenSize(nyARIntSize.w, nyARIntSize.h);

            camProjMat = GetProjectionMatrix(param, zNearPlane, zFarPlane);

            INyARMarkerSystemConfig arMarkerSystemConfig = new NyARMarkerSystemConfig(param);
            markerSystem = new NyARMarkerSystem(arMarkerSystemConfig);

            initialized = true;
        }
Example #12
        public bool InitializeApplication(Form1 topLevelForm, CaptureDevice i_cap_device)
        {
            topLevelForm.ClientSize = new Size(SCREEN_WIDTH, SCREEN_HEIGHT);

            i_cap_device.SetCaptureListener(this);
            i_cap_device.PrepareCapture(SCREEN_WIDTH, SCREEN_HEIGHT, 30);
            this._cap = i_cap_device;



            //this._raster = new DsRgbRaster(i_cap_device.video_width, i_cap_device.video_height,NyARBufferType.BYTE1D_B8G8R8X8_32);

            #region my code
            try
            {
                byte[] bimg = service.getb();
                //if(bimg != null)
                {
                    Image img = byteToImage(bimg);
                    if (img != null)
                    {
                        //frm.textBox1.Text = img.ToString();
                        this._raster = new NyARBitmapRaster((Bitmap)img);
                    }
                }
                //else
            }
            catch (Exception x)
            {
                //MessageBox.Show(x.ToString());
            }
            #endregion


            NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(AR_CAMERA_FILE));
            ap.changeScreenSize(SCREEN_WIDTH, SCREEN_HEIGHT);


            NyARCode code = NyARCode.createFromARPattFile(new StreamReader(AR_CODE_FILE), 16, 16);


            this._ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0, NyARSingleDetectMarker.PF_NYARTOOLKIT);


            this._ar.setContinueMode(true);


            this._device = PrepareD3dDevice(topLevelForm);
            this._device.RenderState.ZBufferEnable = true;
            this._device.RenderState.Lighting      = false;



            Matrix tmp = new Matrix();
            NyARD3dUtil.toCameraFrustumRH(ap.getPerspectiveProjectionMatrix(), ap.getScreenSize(), 1, 10, 10000, ref tmp);
            this._device.Transform.Projection = tmp;


            this._device.Transform.View = Matrix.LookAtLH(
                new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 1.0f), new Vector3(0.0f, 1.0f, 0.0f));
            Viewport vp = new Viewport();
            vp.X      = 0;
            vp.Y      = 0;
            vp.Height = ap.getScreenSize().h;
            vp.Width  = ap.getScreenSize().w;
            vp.MaxZ   = 1.0f;

            this._device.Viewport = vp;


            this._cube = new ColorCube(this._device, 40);



            this._surface = new NyARD3dSurface(this._device, SCREEN_WIDTH, SCREEN_HEIGHT);

            this._is_marker_enable = false;
            return(true);
        }