Example #1
        /// <summary>
        /// Initializes the detector for single marker detection.
        /// </summary>
        /// <param name="width">The width of the buffer that will be used for detection.</param>
        /// <param name="height">The height of the buffer that will be used for detection.</param>
        /// <param name="nearPlane">The near view plane of the frustum.</param>
        /// <param name="farPlane">The far view plane of the frustum.</param>
        /// <param name="markers">A list of markers that should be detected.</param>
        /// <param name="bufferType">The type of the buffer.</param>
        /// <param name="adaptive">Performs an adaptive bitmap thresholding if set to true. Default = false.</param>
        protected void Initialize(int width, int height, double nearPlane, double farPlane, IList <Marker> markers, int bufferType, bool adaptive = false)
        {
            // Check arguments
            if (markers == null)
            {
                throw new ArgumentNullException("markers");
            }
            if (!markers.Any())
            {
                throw new ArgumentException("At least one marker has to be provided.", "markers");
            }

            // Member init
            this.bufferWidth  = width;
            this.bufferHeight = height;
            this.isAdaptive   = adaptive;

            // Init pattern matchers with markers and check segment size, which has to be equal for all markers
            int segmentX        = markers[0].SegmentsX;
            int segmentY        = markers[0].SegmentsY;
            var patternMatchers = new List <PatternMatcher>(markers.Count);

            foreach (var marker in markers)
            {
                if (marker.SegmentsX != segmentX || marker.SegmentsY != segmentY)
                {
                    throw new ArgumentException("The Segment size has to be equal for all markers. Don't mix 16x16 and 32x32 markers for example.", "markers");
                }
                patternMatchers.Add(new PatternMatcher(marker));
            }

            // Load default camera calibration data
            var asmName = new System.Reflection.AssemblyName(System.Reflection.Assembly.GetExecutingAssembly().FullName).Name;

            var streamResInfoCam = Application.GetResourceStream(new Uri(asmName + ";component/data/Camera_Calibration_Default.dat", UriKind.Relative));
            var cameraParameters = new NyARParam();

            using (var cameraCalibrationDataStream = streamResInfoCam.Stream)
            {
                cameraParameters.loadARParam(cameraCalibrationDataStream);
                cameraParameters.changeScreenSize(width, height);
            }

            // Get projection matrix from camera calibration data
            this.Projection = cameraParameters.GetCameraFrustumRH(nearPlane, farPlane);

            // Init detector and necessary data
            var colorPattern = new NyARColorPatt_Perspective_O2(segmentX, segmentY, 4, 25);
            var patternMatchDeviationData = new NyARMatchPattDeviationColorData(segmentX, segmentY);

            this.squareDetector          = new NyARSquareContourDetector_Rle(cameraParameters.getScreenSize());
            this.squareDetectionListener = new SquareDetectionListener(patternMatchers, cameraParameters, colorPattern, patternMatchDeviationData);

            // Init buffer members
            this.filteredBuffer = new NyARBinRaster(width, height);
            if (adaptive)
            {
                this.bufferFilter = new NyARRasterFilter_AdaptiveThreshold(bufferType);
            }
            else
            {
                this.bufferFilter = new NyARRasterFilter_ARToolkitThreshold(this.Threshold, bufferType);
            }
        }
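
The check above enforces one rule worth calling out: every marker in the list must use the same segment grid. Below is a minimal stand-alone sketch of that rule, using a hypothetical MarkerInfo record in place of the library's Marker type (only SegmentsX/SegmentsY are assumed here):

using System;
using System.Collections.Generic;
using System.Linq;

// Hypothetical stand-in for the Marker type; only the segment counts are modeled.
record MarkerInfo(int SegmentsX, int SegmentsY);

static class MarkerListCheck
{
    // True when every marker uses the same segment grid (e.g. all 16x16, never mixed with 32x32).
    static bool HasUniformSegmentGrid(IList<MarkerInfo> markers)
    {
        if (markers == null || !markers.Any())
        {
            throw new ArgumentException("At least one marker is required.", nameof(markers));
        }
        return markers.All(m => m.SegmentsX == markers[0].SegmentsX &&
                                m.SegmentsY == markers[0].SegmentsY);
    }

    static void Main()
    {
        var uniform = new List<MarkerInfo> { new(16, 16), new(16, 16) };
        var mixed   = new List<MarkerInfo> { new(16, 16), new(32, 32) };
        Console.WriteLine(HasUniformSegmentGrid(uniform)); // True
        Console.WriteLine(HasUniformSegmentGrid(mixed));   // False
    }
}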
Example #2
 public void notifyOnUpdateCameraParametor(NyARParam i_param, double i_near, double i_far)
 {
     for (int i = 0; i < this._length; i++)
     {
         this._items[i].onUpdateCameraParametor(i_param, i_near, i_far);
     }
 }
        public MainWindow()
        {
            InitializeComponent();

            CaptureDeviceList cl = new CaptureDeviceList();

            m_cap = cl[0];
            m_cap.SetCaptureListener(this);
            m_cap.PrepareCapture(cameraResX, cameraResY, 30); // 800x600 resolution, 30 fps

            NyARParam ap = new NyARParam();

            ap.loadARParamFromFile(AR_CAMERA_FILE);
            ap.changeScreenSize(cameraResX, cameraResY);

            this.m_raster = new NyARRgbRaster(m_cap.video_width, m_cap.video_height, NyARBufferType.BYTE1D_R8G8B8_24,
                                              false);

            NyARCode code = new NyARCode(16, 16); // 16 pixels to detect within black box

            code.loadARPattFromFile(AR_CODE_FILE);
            this.m_ar = new NyARDetectMarker(ap, new NyARCode[] { code }, new double[] { 80.0 }, 1,
                                             NyARBufferType.BYTE1D_B8G8R8_24);
            this.m_ar.setContinueMode(false);

            this.Loaded += MainWindow_Loaded;
        }
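
The constructor above only wires up capture and detection; the per-frame work happens in the capture callback. The following is a rough sketch of that callback, not taken from this sample: wrapBuffer, detectMarkerLite and getConfidence are assumed from the NyARToolkit API this C# port mirrors, and OnFrame/frameBuffer are illustrative names for a frame already converted to the raster's BYTE1D_R8G8B8_24 layout.

        // Plausible per-frame handler (assumed API, illustrative names).
        private void OnFrame(byte[] frameBuffer)
        {
            // The raster was created with isAlloc = false, so it wraps an external buffer.
            this.m_raster.wrapBuffer(frameBuffer);

            // Detect all registered patterns with a fixed binarization threshold of 100.
            int found = this.m_ar.detectMarkerLite(this.m_raster, 100);

            for (int i = 0; i < found; i++)
            {
                // Keep only confident matches; 0.6 is an arbitrary cut-off.
                if (this.m_ar.getConfidence(i) < 0.6)
                {
                    continue;
                }
                // Read the pose here with the getTransmationMatrix overload of the
                // NyARToolkitCS version in use.
            }
        }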
        /**
         * This function initializes the instance.
         * Call it from the derived class.
         * @param i_param
         * The camera parameter object. Its size must be the same as the image passed to {@link #detectMarker}.
         * @param i_encoder
         * Specifies the value encoder for the Id marker.
         * @param i_marker_width
         * Specifies the physical width and height of the marker, in millimeters.
         */
        protected void initInstance(NyARParam i_param, INyIdMarkerDataEncoder i_encoder, double i_marker_width)
        {
            //Already initialized?
            Debug.Assert(this._initialized == false);

            NyARIntSize scr_size = i_param.getScreenSize();

            // Create the analysis objects
            this._square_detect = new RleDetector(
                i_param,
                i_encoder,
                new NyIdMarkerPickup());
            this._transmat = new NyARTransMat(i_param);

            // Create the binary image buffer
            this._gs_raster = new NyARGrayscaleRaster(scr_size.w, scr_size.h);
            this._histmaker = (INyARHistogramFromRaster)this._gs_raster.createInterface(typeof(INyARHistogramFromRaster));
            //Create the working data objects
            this._data_current     = i_encoder.createDataInstance();
            this._threshold_detect = new NyARHistogramAnalyzer_SlidePTile(15);
            this._initialized      = true;
            this._is_active        = false;
            this._offset           = new NyARRectOffset();
            this._offset.setSquare(i_marker_width);
            return;
        }
Example #5
        public Form1()
        {
            InitializeComponent();
            //AR configuration
            //Load the camera parameter file for AR
            NyARParam ap = NyARParam.loadFromARParamFile(File.OpenRead(AR_CAMERA_FILE), 320, 240);

            //Load the AR pattern code
            NyARCode code = NyARCode.loadFromARPattFile(File.OpenRead(AR_CODE_FILE), 16, 16);

            NyARDoubleMatrix44 result_mat = new NyARDoubleMatrix44();
            //Set the computation mode
            //Create the capture device

            /**************************************************
            *  This code uses capture device 0 (the first
            *  capture device found). On systems with multiple
            *  capture devices it may not work as expected.
            *  To use the n-th device, change the 0 in
            *  CaptureDevice cap=cl[0]; accordingly.
            *  See SimpleLiteDirect3D for how to let the user
            *  select a device manually.
            **************************************************/
            CaptureDeviceList cl  = new CaptureDeviceList();
            CaptureDevice     cap = cl[0];

            cap.SetCaptureListener(this);
            cap.PrepareCapture(320, 240, 30);
            this.m_cap = cap;
            //Create the raster.
            this.m_raster = new DsRgbRaster(cap.video_width, cap.video_height);
            //Create the class that tracks a single pattern
            this.m_ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0);
            this.m_ar.setContinueMode(false);
        }
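
A sketch of the matching capture callback, modeled on the NyARToolkitCS SimpleLite samples; the OnBuffer signature, DsRgbRaster.setBuffer, video_vertical_flip, detectMarkerLite, getConfidence and getTransmationMatrix are assumed from those samples and may differ between library versions.

        // Plausible capture callback for the fields prepared in the constructor (assumed API).
        public void OnBuffer(CaptureDevice i_sender, double i_sample_time, IntPtr i_buffer, int i_buffer_len)
        {
            lock (this)
            {
                // Hand the DirectShow frame to the raster.
                this.m_raster.setBuffer(i_buffer, i_buffer_len, i_sender.video_vertical_flip);

                // Detect the single pattern and read its pose when the match is confident enough.
                if (this.m_ar.detectMarkerLite(this.m_raster) && this.m_ar.getConfidence() > 0.4)
                {
                    NyARDoubleMatrix44 result_mat = new NyARDoubleMatrix44();
                    this.m_ar.getTransmationMatrix(result_mat);
                    // result_mat now holds the marker-to-camera transform.
                }
            }
        }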
Example #6
        public D3dManager(Form i_main_window, NyARParam i_nyparam, int i_profile_id)
        {
            PresentParameters pp = new PresentParameters();

            // Specify true for windowed mode, false for full-screen mode
            pp.Windowed = true;
            // For the swap effect, just use Discard for now.
            pp.SwapEffect             = SwapEffect.Discard;
            pp.EnableAutoDepthStencil = true;
            pp.AutoDepthStencilFormat = DepthFormat.D16;
            pp.BackBufferCount        = 0;
            pp.BackBufferFormat       = Format.R5G6B5;
            this._d3d_device          = new Device(0, DeviceType.Default, i_main_window.Handle, CreateFlags.None, pp);

            //Set up the viewport
            float scale = setupView(i_nyparam, i_main_window.ClientSize);

            this._scale = scale;
            // Disable lighting
            this._d3d_device.RenderState.Lighting = false;

            //Create the destination rectangle for the camera image
            Viewport vp = this._d3d_device.Viewport;

            this._view_rect = new Rectangle(vp.X, vp.Y, vp.Width, vp.Height);

            NyARIntSize cap_size = i_nyparam.getScreenSize();

            this._background_size = new Size(cap_size.w, cap_size.h);
            return;
        }
Example #7
            public MarkerProcessor(NyARParam i_cparam, int i_raster_format)
            {
                initInstance(i_cparam, new NyIdMarkerDataEncoder_RawBit(), 100);

                //Initialize the application framework
                return;
            }
Example #8
 protected NyARSingleCameraSystem(NyARParam i_ref_cparam)
 {
     this._observer  = new ObserverList(3);
     this._ref_param = i_ref_cparam;
     this._frustum   = new NyARFrustum();
     this.setProjectionMatrixClipping(FRUSTUM_DEFAULT_NEAR_CLIP, FRUSTUM_DEFAULT_FAR_CLIP);
 }
Example #9
        public void Test()
        {
            //Load the camera parameter file for AR
            NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(camera_file));

            ap.changeScreenSize(320, 240);

            //Load the test image (320x240 RGB raw data)
            StreamReader sr = new StreamReader(data_file);
            BinaryReader bs = new BinaryReader(sr.BaseStream);

            byte[]        raw = bs.ReadBytes(320 * 240 * 3);
            NyARRgbRaster ra  = new NyARRgbRaster(320, 240, NyARBufferType.BYTE1D_R8G8B8_24, false);

            ra.wrapBuffer(raw);

            MarkerProcessor pr = new MarkerProcessor(ap, ra.getBufferType());

            pr.detectMarker(ra);
            Console.WriteLine(pr.transmat.m00 + "," + pr.transmat.m01 + "," + pr.transmat.m02 + "," + pr.transmat.m03);
            Console.WriteLine(pr.transmat.m10 + "," + pr.transmat.m11 + "," + pr.transmat.m12 + "," + pr.transmat.m13);
            Console.WriteLine(pr.transmat.m20 + "," + pr.transmat.m21 + "," + pr.transmat.m22 + "," + pr.transmat.m23);
            Console.WriteLine(pr.transmat.m30 + "," + pr.transmat.m31 + "," + pr.transmat.m32 + "," + pr.transmat.m33);
            Console.WriteLine(pr.current_id);
            return;
        }
Example #10
 public NyARSurfaceTrackingTransmatUtils(NyARParam i_ref_param, double i_tracking_threshold)
 {
     this._surface_threshold = i_tracking_threshold;
     this._icp   = new NyARIcpPoint(i_ref_param.getPerspectiveProjectionMatrix());
     this._icp_r = new NyARIcpPointRobust(i_ref_param.getPerspectiveProjectionMatrix());
     this._last_inliner_probability = 0;
 }
Example #11
        public MainWindow()
        {
            InitializeComponent();

            this.GdMainZm.Height             = 600;
            this.CameraZm.Height             = 600;
            this.CameraZm.DesiredPixelHeight = 600;

            this.GdMainZm.Width             = 800;
            this.CameraZm.Width             = 800;
            this.CameraZm.DesiredPixelWidth = 800;

            this.CameraZm.EnableSampleGrabbing = true;
            this.CameraZm.NewVideoSample      += CameraZm_NewVideoSample;

            NyARParam ap = NyARParam.loadFromARParamFile(File.OpenRead(AR_CAMERA_FILE), 800, 600);

            this.ARRgbRaster = new NyARRgbRaster_BYTE1D_B8G8R8_24(800, 600, false);

            NyARCode code = NyARCode.loadFromARPattFile(File.OpenRead(AR_CODE_FILE), 16, 16);

            this.ARDetectMarker = new NyARDetectMarker(ap, new NyARCode[] { code }, new double[] { 80.0 }, 1);
            this.ARDetectMarker.setContinueMode(false);

            this.Loaded += MainWindow_Loaded;
        }
Example #12
 public NyARSingleDetectMarker_NyARTK_FITTING_ARTKv2(NyARParam i_ref_param, NyARCode i_ref_code, double i_marker_width)
     : base(i_ref_param, i_ref_code, i_marker_width)
 {
     this._inst_patt     = new NyARColorPatt_Perspective(i_ref_code.getWidth(), i_ref_code.getHeight(), 4, 25);
     this._transmat      = new NyARTransMat_ARToolKit(i_ref_param);
     this._square_detect = new NyARSingleDetectMarker_ARTKv2.ARTKDetector(this, i_ref_param.getScreenSize());
 }
        protected void initInstance(
            INyARColorPatt i_patt_inst,
            NyARSquareContourDetector i_sqdetect_inst,
            INyARTransMat i_transmat_inst,
            INyARRasterFilter_Rgb2Bin i_filter,
            NyARParam i_ref_param,
            NyARCode i_ref_code,
            double i_marker_width)
        {
            NyARIntSize scr_size = i_ref_param.getScreenSize();

            // Create the analysis objects
            this._square_detect = i_sqdetect_inst;
            this._transmat      = i_transmat_inst;
            this._tobin_filter  = i_filter;
            //Create the binary image buffer
            this._bin_raster = new NyARBinRaster(scr_size.w, scr_size.h);
            //For the pattern matching search
            this._inst_patt      = i_patt_inst;
            this._deviation_data = new NyARMatchPattDeviationColorData(i_ref_code.getWidth(), i_ref_code.getHeight());
            this._coordline      = new NyARCoord2Linear(i_ref_param.getScreenSize(), i_ref_param.getDistortionFactor());
            this._match_patt     = new NyARMatchPatt_Color_WITHOUT_PCA(i_ref_code);
            //Create the offset
            this._offset = new NyARRectOffset();
            this._offset.setSquare(i_marker_width);
            return;
        }
 /**
  * Constructor. Creates a configuration from the given camera parameter object.
  * @param i_param
  * The camera parameter object.
  * @param i_transmat_algo_type
  * The transformation matrix algorithm type (1 to 3).
  */
 public NyARMarkerSystemConfig(NyARParam i_param, int i_transmat_algo_type)
 {
     Debug.Assert(1 <= i_transmat_algo_type && i_transmat_algo_type <= 3);
     this._param = i_param;
     this._transmat_algo_type = i_transmat_algo_type;
     return;
 }
Example #15
        private float setupView(NyARParam i_nyparam, Size i_client_size)
        {
            NyARIntSize cap_size = i_nyparam.getScreenSize();
            float       scale;
            int         new_w, new_h;

            //Try fitting to the height first.
            scale = (float)i_client_size.Height / (float)cap_size.h;
            new_h = i_client_size.Height;
            new_w = (int)((float)cap_size.w * scale);
            //If the width does not fit, fit to the width instead.
            if (new_w > i_client_size.Width)
            {
                scale = (float)i_client_size.Width / (float)cap_size.w;
                new_w = i_client_size.Width;
                new_h = (int)(cap_size.h * scale);
            }
            //Create the viewport
            Viewport vp = new Viewport();

            vp.X      = (i_client_size.Width - new_w) / 2;
            vp.Y      = (i_client_size.Height - new_h) / 2;
            vp.Height = new_h;
            vp.Width  = new_w;
            //Apply the viewport
            this._d3d_device.Viewport = vp;

            // Set up the view transform (a left-handed view matrix)
            // From 0,0,0, looking toward +Z, with +Y as the up direction
            this._d3d_device.Transform.View = Matrix.LookAtLH(
                new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 1.0f), new Vector3(0.0f, 1.0f, 0.0f));
            return(scale);
        }
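
The letterbox arithmetic in setupView can be exercised on its own. The stand-alone sketch below (illustrative names, no Direct3D types) reproduces the fit-to-height-then-width logic and prints where the viewport ends up:

using System;

static class LetterboxDemo
{
    // Mirrors setupView: fit the capture size to the client height first,
    // then fall back to fitting the width if the scaled width overflows.
    static (float scale, int x, int y, int w, int h) Fit(int capW, int capH, int clientW, int clientH)
    {
        float scale = (float)clientH / capH;
        int   newH  = clientH;
        int   newW  = (int)(capW * scale);

        if (newW > clientW)
        {
            scale = (float)clientW / capW;
            newW  = clientW;
            newH  = (int)(capH * scale);
        }
        return (scale, (clientW - newW) / 2, (clientH - newH) / 2, newW, newH);
    }

    static void Main()
    {
        // 320x240 capture in an 800x600 client area: scale 2.5, fills the client exactly.
        Console.WriteLine(Fit(320, 240, 800, 600));
        // 320x240 capture in a 640x600 client area: width-limited, centered vertically (y = 60).
        Console.WriteLine(Fit(320, 240, 640, 600));
    }
}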
        /**
         * This function initializes the instance.
         * Call it from the constructor.
         * @see NyARDetectMarker#NyARDetectMarker(NyARParam, NyARCode[], double[], int, int)
         * @param i_ref_param
         * See the constructor referenced above.
         * @param i_ref_code
         * See the constructor referenced above.
         * @param i_marker_width
         * See the constructor referenced above.
         * @param i_number_of_code
         * See the constructor referenced above.
         */
        protected void initInstance(
            NyARParam i_ref_param,
            NyARCode[] i_ref_code,
            double[] i_marker_width,
            int i_number_of_code)
        {
            NyARIntSize scr_size = i_ref_param.getScreenSize();
            // Create the analysis objects
            int cw = i_ref_code[0].getWidth();
            int ch = i_ref_code[0].getHeight();

            this._transmat = new NyARTransMat(i_ref_param);
            //NyARToolkit profile
            this._square_detect = new RleDetector(new NyARColorPatt_Perspective(cw, ch, 4, 25), i_ref_code, i_number_of_code, i_ref_param);

            //Store the physical marker sizes
            this._offset = NyARRectOffset.createArray(i_number_of_code);
            for (int i = 0; i < i_number_of_code; i++)
            {
                this._offset[i].setSquare(i_marker_width[i]);
            }
            //Create the binary image buffer
            this._bin_raster = new NyARBinRaster(scr_size.w, scr_size.h);
            return;
        }
Example #17
        protected void initInstance(
            NyARParam i_ref_param,
            NyARCode[] i_ref_code,
            double[] i_marker_width,
            int i_number_of_code,
            int i_input_raster_type)
        {
            NyARIntSize scr_size = i_ref_param.getScreenSize();
            // Create the analysis objects
            int cw = i_ref_code[0].getWidth();
            int ch = i_ref_code[0].getHeight();

            //Callback function for detectMarker
            this._detect_cb = new DetectSquareCB(
                new NyARColorPatt_Perspective_O2(cw, ch, 4, 25),
                i_ref_code, i_number_of_code, i_ref_param);
            this._transmat = new NyARTransMat(i_ref_param);
            //NyARToolkit profile
            this._square_detect = new NyARSquareContourDetector_Rle(i_ref_param.getScreenSize());
            this._tobin_filter  = new NyARRasterFilter_ARToolkitThreshold(100, i_input_raster_type);

            //Store the physical marker sizes
            this._offset = NyARRectOffset.createArray(i_number_of_code);
            for (int i = 0; i < i_number_of_code; i++)
            {
                this._offset[i].setSquare(i_marker_width[i]);
            }
            //Create the binary image buffer
            this._bin_raster = new NyARBinRaster(scr_size.w, scr_size.h);
            return;
        }
 public DetectSquareCB(INyARColorPatt i_inst_patt, NyARCode i_ref_code, NyARParam i_param)
 {
     this._inst_patt      = i_inst_patt;
     this._deviation_data = new NyARMatchPattDeviationColorData(i_ref_code.getWidth(), i_ref_code.getHeight());
     this._coordline      = new Coord2Linear(i_param.getScreenSize(), i_param.getDistortionFactor());
     this._match_patt     = new NyARMatchPatt_Color_WITHOUT_PCA(i_ref_code);
     return;
 }
Example #19
 public NyARSingleCameraView(NyARParam i_ref_cparam)
 {
     this._ref_param = i_ref_cparam;
     this._frustum   = new NyARFrustum();
     this._observer  = new ObserverList(3);
     this.setClipping(FRUSTUM_DEFAULT_NEAR_CLIP, FRUSTUM_DEFAULT_FAR_CLIP);
     return;
 }
Example #20
 /**
  * Constructor.
  * When the input image has little barrel distortion, or has already been corrected,
  * passing null for i_dist_factor of
  * {@link #NyARReality(NyARIntSize, double, double, NyARPerspectiveProjectionMatrix, NyARCameraDistortionFactor, int, int)}
  * can be expected to run faster.
  * @param i_param
  * Specifies the camera parameters.
  * @param i_near
  * Specifies the near point of the view frustum, in millimeters.
  * The default value is {@link #FRASTRAM_ARTK_NEAR}.
  * @param i_far
  * Specifies the far point of the view frustum, in millimeters.
  * The default value is {@link #FRASTRAM_ARTK_FAR}.
  * @param i_max_known_target
  * Specifies the maximum number of Known targets.
  * @param i_max_unknown_target
  * Specifies the maximum number of Unknown targets.
  * @throws NyARException
  */
 public NyARReality(NyARParam i_param, double i_near, double i_far, int i_max_known_target, int i_max_unknown_target)
 {
      //Constants and other setup
     this.MAX_LIMIT_KNOWN   = i_max_known_target;
     this.MAX_LIMIT_UNKNOWN = i_max_unknown_target;
     this.InitInstance(i_param.getScreenSize(), i_near, i_far, i_param.getPerspectiveProjectionMatrix(), i_param.getDistortionFactor());
     return;
 }
 public DetectSquareCB(NyARParam i_param, INyIdMarkerDataEncoder i_encoder)
 {
     this._coordline    = new Coord2Linear(i_param.getScreenSize(), i_param.getDistortionFactor());
     this._data_temp    = i_encoder.createDataInstance();
     this._current_data = i_encoder.createDataInstance();
     this._encoder      = i_encoder;
     return;
 }
 public DetectSquare(NyARParam i_param, int i_raster_type)
     : base(i_param.getScreenSize())
 {
     this._match_patt  = null;
     this._coordline   = new NyARCoord2Linear(i_param.getScreenSize(), i_param.getDistortionFactor());
     this._raster_type = i_raster_type;
     return;
 }
        private void InitARConfigs()
        {
            NyARParam ap   = NyARParam.loadFromARParamFile(File.OpenRead(AR_CAMERA_FILE), 800, 600);
            NyARCode  code = NyARCode.loadFromARPattFile(File.OpenRead(AR_CODE_FILE2), 16, 16);

            this.MyArRaster           = new DsRgbRaster(800, 600);
            this.MySingleDetectMarker = NyARSingleDetectMarker.createInstance(ap, code, 80.0);
            this.MySingleDetectMarker.setContinueMode(false);
        }
Example #24
 public NyARIcp(NyARParam i_param)
 {
     this._ref_matXc2U              = i_param.getPerspectiveProjectionMatrix();
     this._maxLoop                  = ICP_MAX_LOOP;
     this.breakLoopErrorThresh      = ICP_BREAK_LOOP_ERROR_THRESH;
     this.breakLoopErrorRatioThresh = ICP_BREAK_LOOP_ERROR_RATIO_THRESH;
     this.breakLoopErrorThresh2     = ICP_BREAK_LOOP_ERROR_THRESH2;
     this.inlierProb                = ICP_INLIER_PROBABILITY;
 }
Example #25
        public bool InitializeApplication(Form1 topLevelForm, CaptureDevice i_cap_device)
        {
            topLevelForm.ClientSize = new Size(SCREEN_WIDTH, SCREEN_HEIGHT);
            //Create the capture (QVGA, 30 fps)
            i_cap_device.SetCaptureListener(this);
            i_cap_device.PrepareCapture(SCREEN_WIDTH, SCREEN_HEIGHT, 30);
            this._cap = i_cap_device;

            //Create the AR raster (for DirectShow capture).
            this._raster = new DsRgbRaster(i_cap_device.video_width, i_cap_device.video_height);

            //Load and apply the camera parameter file for AR
            NyARParam ap = NyARParam.loadFromARParamFile(File.OpenRead(AR_CAMERA_FILE), SCREEN_WIDTH, SCREEN_HEIGHT);

            //Prepare the Direct3D utilities

            //Prepare the processor
            this._processor = new MarkerProcessor(ap, this._raster.getBufferType());
            NyARCode[] codes = new NyARCode[2];
            codes[0] = NyARCode.loadFromARPattFile(File.OpenRead(AR_CODE_FILE1), 16, 16);
            codes[1] = NyARCode.loadFromARPattFile(File.OpenRead(AR_CODE_FILE2), 16, 16);
            this._processor.setARCodeTable(codes, 16, 80.0);


            //Prepare the 3D device
            this._device = PrepareD3dDevice(topLevelForm);
            this._device.RenderState.ZBufferEnable = true;
            this._device.RenderState.Lighting      = false;
            this._device.RenderState.CullMode      = Cull.CounterClockwise;

            Viewport vp = new Viewport();

            vp.X      = 0;
            vp.Y      = 0;
            vp.Height = ap.getScreenSize().h;
            vp.Width  = ap.getScreenSize().w;
            vp.MaxZ   = 1.0f;
            //Apply the viewport
            this._device.Viewport = vp;

            this._text = new TextPanel(this._device, 1);
            //Set the camera projection
            Matrix tmp = new Matrix();

            NyARD3dUtil.toCameraFrustumRH(ap.getPerspectiveProjectionMatrix(), ap.getScreenSize(), 1, 10, 10000, ref tmp);
            this._device.Transform.Projection = tmp;

            // Set up the view transform (a left-handed view matrix)
            // From 0,0,0, looking toward +Z, with +Y as the up direction
            this._device.Transform.View = Matrix.LookAtLH(
                new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 1.0f), new Vector3(0.0f, 1.0f, 0.0f));

            //Create the background surface
            this._surface = new NyARD3dSurface(this._device, SCREEN_WIDTH, SCREEN_HEIGHT);

            return(true);
        }
 public RleDetector(NyARParam i_param, INyIdMarkerDataEncoder i_encoder, NyIdMarkerPickup i_id_pickup) : base(i_param.getScreenSize())
 {
     this._coordline    = new NyARCoord2Linear(i_param.getScreenSize(), i_param.getDistortionFactor());
     this._data_temp    = i_encoder.createDataInstance();
     this._current_data = i_encoder.createDataInstance();
     this._encoder      = i_encoder;
     this._id_pickup    = i_id_pickup;
     return;
 }
Example #27
 public NyARIcp(NyARParam i_param)
 {
     this._ref_matXc2U = i_param.getPerspectiveProjectionMatrix();
     this._maxLoop = ICP_MAX_LOOP;
     this.breakLoopErrorThresh = ICP_BREAK_LOOP_ERROR_THRESH;
     this.breakLoopErrorRatioThresh = ICP_BREAK_LOOP_ERROR_RATIO_THRESH;
     this.breakLoopErrorThresh2 = ICP_BREAK_LOOP_ERROR_THRESH2;
     this.inlierProb = ICP_INLIER_PROBABILITY;
 }
        public bool InitializeApplication(Form1 topLevelForm, CaptureDevice i_cap_device)
        {
            topLevelForm.ClientSize = new Size(SCREEN_WIDTH, SCREEN_HEIGHT);
            //Create the capture (QVGA, 30 fps)
            i_cap_device.SetCaptureListener(this);
            i_cap_device.PrepareCapture(SCREEN_WIDTH, SCREEN_HEIGHT, 30);
            this._cap = i_cap_device;

            //Load and apply the camera parameter file for AR
            NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(AR_CAMERA_FILE));

            ap.changeScreenSize(SCREEN_WIDTH, SCREEN_HEIGHT);

            //Build the marker library (NyId)
            this._mklib = new RawbitSerialIdTable(10);
            //Create the marker size table (all 8 cm for now)
            this._mklib.addAnyItem("any id", 80);

            //Prepare the Reality objects
            this._reality        = new NyARRealityD3d(ap, 10, 10000, 2, 10);
            this._reality_source = new NyARRealitySource_DShow(SCREEN_WIDTH, SCREEN_HEIGHT, null, 2, 100);

            //Prepare the 3D device
            this._device = PrepareD3dDevice(topLevelForm);
            this._device.RenderState.ZBufferEnable = true;
            this._device.RenderState.Lighting      = false;



            //Set the camera projection
            Matrix tmp = new Matrix();

            this._reality.getD3dCameraFrustum(ref tmp);
            this._device.Transform.Projection = tmp;

            // Set up the view transform (a left-handed view matrix)
            // From 0,0,0, looking toward +Z, with +Y as the up direction
            this._device.Transform.View = Matrix.LookAtLH(
                new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 1.0f), new Vector3(0.0f, 1.0f, 0.0f));
            Viewport vp = new Viewport();

            vp.Y      = 0;
            vp.Height = ap.getScreenSize().h;
            vp.Width  = ap.getScreenSize().w;
            vp.MaxZ   = 1.0f;
            //Apply the viewport
            this._device.Viewport = vp;

            //Rendering instance for the color cube
            this._cube = new ColorCube(this._device, 40);
            //Create the background surface
            this._surface = new NyARD3dSurface(this._device, SCREEN_WIDTH, SCREEN_HEIGHT);

            return(true);
        }
        /**
         * Constructor.
         * @param i_config
         * Specifies the NFT configuration object.
         */
        public NyARNftSystem(INyARNftSystemConfig i_config)
            : base(i_config.getNyARSingleCameraView())
        {
            NyARParam cparam = this._view.getARParam();

            this._surface_tracker = new NyARSurfaceTracker(cparam, 16, 0.5);
            this._sftrackingutils = new NyARSurfaceTrackingTransmatUtils(cparam, 5.0);
            this._kpm_worker      = new KpmWorker(cparam, this._nftdatalist);
            this._kpm_thread      = new Thread(this._kpm_worker.run);
            this._kpm_thread.Start(this._kpm_thread);
        }
 public NyARTransMat_ARToolKit(NyARParam i_param)
 {
     NyARCameraDistortionFactor dist = i_param.getDistortionFactor();
     NyARPerspectiveProjectionMatrix pmat = i_param.getPerspectiveProjectionMatrix();
     this._transsolver = new NyARTransportVectorSolver_ARToolKit(pmat);
      //When compatibility matters, use NyARRotMatrix_ARToolKit.
      //NyARRotMatrix_NyARToolKit and NyARRotMatrix_ARToolKit follow the same logic, but the values differ slightly.
     this._rotmatrix = new NyARRotMatrix_ARToolKit_O2(pmat);
     this._mat_optimize = new NyARRotMatrixOptimize_O2(pmat);
     this._ref_dist_factor = dist;
 }
Example #31
 public OnSquareDetect(
     NyARParam i_params,
     ARMarkerList i_armk_list, NyIdList i_idmk_list, ARPlayCardList i_psmk_list,
     TrackingList i_tracking_list, int i_initial_stack_size)
 {
     this._coordline         = new NyARCoord2Linear(i_params.getScreenSize(), i_params.getDistortionFactor());
     this._ref_armk_list     = i_armk_list;
     this._ref_idmk_list     = i_idmk_list;
     this._ref_psmk_list     = i_psmk_list;
     this._ref_tracking_list = i_tracking_list;
     this._sq_stack          = new SquareStack(i_initial_stack_size);
 }
        public NyARTransMat_ARToolKit(NyARParam i_param)
        {
            NyARCameraDistortionFactor      dist = i_param.getDistortionFactor();
            NyARPerspectiveProjectionMatrix pmat = i_param.getPerspectiveProjectionMatrix();

            this._transsolver = new NyARTransportVectorSolver_ARToolKit(pmat);
            //When compatibility matters, use NyARRotMatrix_ARToolKit.
            //NyARRotMatrix_NyARToolKit and NyARRotMatrix_ARToolKit follow the same logic, but the values differ slightly.
            this._rotmatrix       = new NyARRotMatrix_ARToolKit_O2(pmat);
            this._mat_optimize    = new NyARRotMatrixOptimize_O2(pmat);
            this._ref_dist_factor = dist;
        }
Example #33
        public NyARIcpStereo(NyARParam i_param_l, NyARParam i_param_r, NyARDoubleMatrix44 i_matC2_l, NyARDoubleMatrix44 i_matC2_r)
        {
            this.maxLoop = ICP_MAX_LOOP;
            this.breakLoopErrorThresh = ICP_BREAK_LOOP_ERROR_THRESH;
            this.breakLoopErrorRatioThresh = ICP_BREAK_LOOP_ERROR_RATIO_THRESH;
            this.breakLoopErrorThresh2 = ICP_BREAK_LOOP_ERROR_THRESH2;
            this.inlierProb = ICP_INLIER_PROBABILITY;

            this._ref_matXcl2Ul = i_param_l.getPerspectiveProjectionMatrix();
            this._ref_matXcr2Ur = i_param_r.getPerspectiveProjectionMatrix();
            this._matC2L = i_matC2_l;
            this._matC2R = i_matC2_r;
            return;
        }
Example #34
 /**
  * The constructor.
  * @param i_param
  * ARToolkit parameter object that finished setup.
  * @param i_al_mode
  * fitting algorism type.
  * @throws NyARException
  */
 public NyARIcpTransMat(NyARParam i_param, int i_al_mode)
 {
     this._icpc = new NyARIcpPlane(i_param);
     switch (i_al_mode)
     {
         case AL_POINT:
             this._icpp = new NyARIcpPoint(i_param);
             break;
         case AL_POINT_ROBUST:
             this._icpp = new NyARIcpPointRobust(i_param);
             break;
         default:
             throw new System.ArgumentException();
     }
     return;
 }
Example #35
        private Matrix GetProjectionMatrix(NyARParam i_arparam, float near, float far)
        {
            NyARMat trans_mat = new NyARMat(3, 4);
            NyARMat icpara_mat = new NyARMat(3, 4);
            double[,] p = new double[3, 3], q = new double[4, 4];
            int width, height;
            int i, j;

            NyARIntSize size = i_arparam.getScreenSize();
            width = size.w;
            height = size.h;

            i_arparam.getPerspectiveProjectionMatrix().decompMat(icpara_mat, trans_mat);

            double[][] icpara = icpara_mat.getArray();
            double[][] trans = trans_mat.getArray();

            for (i = 0; i < 3; i++)
            {
                for (j = 0; j < 3; j++)
                {
                    p[i, j] = icpara[i][j] / icpara[2][2];
                }
            }

            q[0, 0] = (2.0 * p[0, 0] / (width));
            q[0, 1] = (2.0 * p[0, 1] / (width));
            q[0, 2] = ((2.0 * p[0, 2] / (width)) - 1.0);
            q[0, 3] = 0.0;

            q[1, 0] = 0.0;
            q[1, 1] = (2.0 * p[1, 1] / (height));
            q[1, 2] = ((2.0 * p[1, 2] / (height)) - 1.0);
            q[1, 3] = 0.0;

            q[2, 0] = 0.0;
            q[2, 1] = 0.0;
            q[2, 2] = (far + near) / (far - near);
            q[2, 3] = -2.0 * far * near / (far - near);

            q[3, 0] = 0.0;
            q[3, 1] = 0.0;
            q[3, 2] = 1.0;
            q[3, 3] = 0.0;

            Matrix mat = Matrix.Identity;
            mat.M11 = (float)(q[0, 0] * trans[0][0] + q[0, 1] * trans[1][0] + q[0, 2] * trans[2][0]);
            mat.M12 = (float)(q[1, 0] * trans[0][0] + q[1, 1] * trans[1][0] + q[1, 2] * trans[2][0]);
            mat.M13 = (float)(q[2, 0] * trans[0][0] + q[2, 1] * trans[1][0] + q[2, 2] * trans[2][0]);
            mat.M14 = (float)(q[3, 0] * trans[0][0] + q[3, 1] * trans[1][0] + q[3, 2] * trans[2][0]);
            mat.M21 = (float)(q[0, 0] * trans[0][1] + q[0, 1] * trans[1][1] + q[0, 2] * trans[2][1]);
            mat.M22 = (float)(q[1, 0] * trans[0][1] + q[1, 1] * trans[1][1] + q[1, 2] * trans[2][1]);
            mat.M23 = (float)(q[2, 0] * trans[0][1] + q[2, 1] * trans[1][1] + q[2, 2] * trans[2][1]);
            mat.M24 = (float)(q[3, 0] * trans[0][1] + q[3, 1] * trans[1][1] + q[3, 2] * trans[2][1]);
            mat.M31 = (float)(q[0, 0] * trans[0][2] + q[0, 1] * trans[1][2] + q[0, 2] * trans[2][2]);
            mat.M32 = (float)(q[1, 0] * trans[0][2] + q[1, 1] * trans[1][2] + q[1, 2] * trans[2][2]);
            mat.M33 = -(float)(q[2, 0] * trans[0][2] + q[2, 1] * trans[1][2] + q[2, 2] * trans[2][2]);
            mat.M34 = -(float)(q[3, 0] * trans[0][2] + q[3, 1] * trans[1][2] + q[3, 2] * trans[2][2]);
            mat.M41 = (float)(q[0, 0] * trans[0][3] + q[0, 1] * trans[1][3] + q[0, 2] * trans[2][3] + q[0, 3]);
            mat.M42 = (float)(q[1, 0] * trans[0][3] + q[1, 1] * trans[1][3] + q[1, 2] * trans[2][3] + q[1, 3]);
            mat.M43 = (float)(q[2, 0] * trans[0][3] + q[2, 1] * trans[1][3] + q[2, 2] * trans[2][3] + q[2, 3]);
            mat.M44 = (float)(q[3, 0] * trans[0][3] + q[3, 1] * trans[1][3] + q[3, 2] * trans[2][3] + q[3, 3]);

            return mat;
        }
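
The q matrix assembled above is an OpenGL-style frustum derived from the normalized intrinsics p, with near/far entering only the third and fourth rows. The stand-alone sketch below reproduces just that step with illustrative intrinsic values (not taken from any real camera_para.dat), which is handy for sanity-checking clipping ranges:

using System;

static class FrustumDemo
{
    // Builds the same 4x4 q matrix as GetProjectionMatrix, given the normalized
    // 3x3 intrinsic matrix p (focal lengths, skew and principal point in pixels).
    static double[,] BuildQ(double[,] p, int width, int height, double near, double far)
    {
        var q = new double[4, 4];   // unset entries stay 0.0
        q[0, 0] = 2.0 * p[0, 0] / width;
        q[0, 1] = 2.0 * p[0, 1] / width;
        q[0, 2] = 2.0 * p[0, 2] / width - 1.0;
        q[1, 1] = 2.0 * p[1, 1] / height;
        q[1, 2] = 2.0 * p[1, 2] / height - 1.0;
        q[2, 2] = (far + near) / (far - near);
        q[2, 3] = -2.0 * far * near / (far - near);
        q[3, 2] = 1.0;
        return q;
    }

    static void Main()
    {
        // Illustrative intrinsics only: fx = fy = 700 px, principal point at the image center.
        var p = new double[3, 3] { { 700, 0, 400 }, { 0, 700, 300 }, { 0, 0, 1 } };
        var q = BuildQ(p, 800, 600, 10, 10000);
        Console.WriteLine($"q[0,0]={q[0, 0]:F3}  q[1,1]={q[1, 1]:F3}  q[2,2]={q[2, 2]:F4}  q[2,3]={q[2, 3]:F1}");
    }
}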
 /**
  * Constructor.
  * Creates the instance from the referenced camera parameters needed for the coordinate calculations.
  * @param i_param
  * Camera parameters in ARToolKit format.
  * The instance references the barrel distortion correction object and the perspective projection object held by it.
  */
 public NyARTransMat_ARToolKit(NyARParam i_param)
     : this(i_param.getDistortionFactor(), i_param.getPerspectiveProjectionMatrix())
 {
 }
 /**
  * Constructor.
  * Creates the instance from the referenced camera parameters needed for the coordinate calculations.
  * @param i_param
  * Camera parameters in ARToolKit format.
  * The instance references the barrel distortion correction object and the perspective projection object held by it.
  */
 public NyARTransMat_ARToolKit(NyARParam i_param)
 {
     initInstance(i_param.getDistortionFactor(), i_param.getPerspectiveProjectionMatrix());
 }
Example #38
        public void InitTracker(params object[] configs)
        {
            if (!(configs.Length == 3 || configs.Length == 5))
            {
                throw new MarkerException("Problem in InitTracker in NewNyAR");
            }

            int imgWidth = 0;
            int imgHeight = 0;
            try
            {
                imgWidth = (int)configs[0];
                imgHeight = (int)configs[1];
                configFilename = (String)configs[2];
                if (configs.Length == 5)
                {
                    threshold = (int)configs[3];
                    continuousMode = (bool)configs[4];
                }
                else
                {
                    threshold = 100;
                    continuousMode = false;
                }
            }
            catch (Exception)
            {
                throw new MarkerException("Problem in InitTracker in NewNyAR");
            }

            nyARIntSize = new NyARIntSize(imgWidth, imgHeight);

            param = new NyARParam();
            param.loadARParam(TitleContainer.OpenStream(configFilename));
            param.changeScreenSize(nyARIntSize.w, nyARIntSize.h);

            camProjMat = GetProjectionMatrix(param, zNearPlane, zFarPlane);

            INyARMarkerSystemConfig arMarkerSystemConfig = new NyARMarkerSystemConfig(param);
            markerSystem = new NyARMarkerSystem(arMarkerSystemConfig);

            initialized = true;
        }
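
Once InitTracker has run, the tracker is typically driven once per frame. The sketch below shows one plausible update path, assuming the NyARMarkerSystem/NyARSensor API of NyARToolkit 4.x (update, isExistMarker, getMarkerMatrix, and a marker id obtained earlier from addARMarker); markerId, sensor, UpdateTracking and the frame argument are illustrative names.

        // Plausible per-frame update for the markerSystem created in InitTracker
        // (API names assumed from NyARToolkit 4.x; verify against the version in use).
        private int markerId;        // illustrative: id returned when the marker was registered
        private NyARSensor sensor;   // illustrative: sensor wrapping the camera image

        private void UpdateTracking(INyARRgbRaster frame)
        {
            if (sensor == null)
            {
                // The sensor uses the same screen size as the loaded camera parameters.
                sensor = new NyARSensor(nyARIntSize);
            }
            sensor.update(frame);          // push the new camera image
            markerSystem.update(sensor);   // run detection/tracking for this frame

            if (markerSystem.isExistMarker(markerId))
            {
                // 4x4 marker-to-camera transform in NyARToolkit's convention;
                // convert it to the engine's matrix type as needed.
                NyARDoubleMatrix44 mat = markerSystem.getMarkerMatrix(markerId);
            }
        }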
Example #39
 public NyARIcpPoint(NyARParam i_param)
     : base(i_param)
 {
 }
 public NyARIcpStereoPoint(NyARParam i_param_l, NyARParam i_param_r,
         NyARDoubleMatrix44 i_matC2_l, NyARDoubleMatrix44 i_matC2_r)
     : base(i_param_l, i_param_r, i_matC2_l, i_matC2_r)
 {
     throw new NyARException("This function is not checked.");
 }