Example #1
        protected NyARSingleDetectMarker(NyARParam i_ref_param, NyARCode i_ref_code, double i_marker_width)
        {
            this._deviation_data = new NyARMatchPattDeviationColorData(i_ref_code.getWidth(), i_ref_code.getHeight());
            this._match_patt     = new NyARMatchPatt_Color_WITHOUT_PCA(i_ref_code);
            this._offset         = new NyARRectOffset();
            this._offset.setSquare(i_marker_width);
            this._coordline = new NyARCoord2Linear(i_ref_param.getScreenSize(), i_ref_param.getDistortionFactor());
            // Create the binarized image buffer
            NyARIntSize s = i_ref_param.getScreenSize();

            this._bin_raster = new NyARBinRaster(s.w, s.h);
        }
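A minimal usage sketch for the protected constructor above, assuming NyARSingleDetectMarker can be subclassed and requires no other members; the subclass name is hypothetical.

        // Sketch only: MySingleDetectMarker is a hypothetical subclass that forwards its arguments.
        class MySingleDetectMarker : NyARSingleDetectMarker
        {
            public MySingleDetectMarker(NyARParam i_param, NyARCode i_code, double i_marker_width)
                : base(i_param, i_code, i_marker_width)
            {
                // The base constructor has already built _match_patt, _offset,
                // _coordline and _bin_raster from the camera parameters and marker code.
            }
        }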
        protected void initInstance(NyARParam i_param, int i_raster_type)
        {
            // Already initialized?
            Debug.Assert(this._initialized == false);

            NyARIntSize scr_size = i_param.getScreenSize();

            // Create the analysis objects
            this._transmat     = new NyARTransMat(i_param);
            this._tobin_filter = new NyARRasterFilter_ARToolkitThreshold(110, i_raster_type);

            // Create the binarized image buffer
            this._bin_raster       = new NyARBinRaster(scr_size.w, scr_size.h);
            this._threshold_detect = new NyARRasterThresholdAnalyzer_SlidePTile(15, i_raster_type, 4);
            this._initialized      = true;
            // Callback handler
            this._detectmarker = new DetectSquare(i_param, i_raster_type);
            this._offset       = new NyARRectOffset();
            return;
        }
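A hedged sketch of how a derived class might run this one-step initialization; the base class name MarkerDetectorBase is a placeholder for the (unshown) class that declares initInstance.

        // Sketch only: MarkerDetectorBase is a placeholder, not a real library type.
        class MyMarkerDetector : MarkerDetectorBase
        {
            public MyMarkerDetector(NyARParam i_param, int i_raster_type)
            {
                // initInstance asserts it runs only once (_initialized == false),
                // so call it exactly once during construction.
                this.initInstance(i_param, i_raster_type);
            }
        }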
        protected void initInstance(NyARParam i_param, INyIdMarkerDataEncoder i_encoder, double i_marker_width, int i_raster_format)
        {
            // Already initialized?
            Debug.Assert(this._initialized == false);

            NyARIntSize scr_size = i_param.getScreenSize();

            // Create the analysis objects
            this._square_detect = new RleDetector(i_param, i_encoder);
            this._transmat      = new NyARTransMat(i_param);

            // Create the binarized image buffer
            this._bin_raster = new NyARBinRaster(scr_size.w, scr_size.h);
            // Create the working data object for the encoder
            this._data_current     = i_encoder.createDataInstance();
            this._tobin_filter     = new NyARRasterFilter_ARToolkitThreshold(110, i_raster_format);
            this._threshold_detect = new NyARRasterThresholdAnalyzer_SlidePTile(15, i_raster_format, 4);
            this._initialized      = true;
            this._is_active        = false;
            this._offset           = new NyARRectOffset();
            this._offset.setSquare(i_marker_width);
            return;
        }
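As a sketch, the Id-marker overload would be driven the same way from the declaring class; the 80 mm marker width below is an illustrative assumption, and the raster format is simply passed through from the caller.

        // Sketch only: i_encoder is whatever INyIdMarkerDataEncoder implementation the application uses.
        protected void SetupIdMarker(NyARParam i_param, INyIdMarkerDataEncoder i_encoder, int i_raster_format)
        {
            // 80.0 is an example marker width in millimetres, not a required value.
            this.initInstance(i_param, i_encoder, 80.0, i_raster_format);
        }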
        protected void initInstance(
            INyARColorPatt i_patt_inst,
            NyARSquareContourDetector i_sqdetect_inst,
            INyARTransMat i_transmat_inst,
            INyARRasterFilter_Rgb2Bin i_filter,
            NyARParam i_ref_param,
            NyARCode i_ref_code,
            double i_marker_width)
        {
            NyARIntSize scr_size = i_ref_param.getScreenSize();

            // Store the provided analysis objects
            this._square_detect = i_sqdetect_inst;
            this._transmat      = i_transmat_inst;
            this._tobin_filter  = i_filter;
            // Create the binarized image buffer
            this._bin_raster = new NyARBinRaster(scr_size.w, scr_size.h);
            // Create the detection callback handler (_detect_cb)
            this._detect_cb = new DetectSquareCB(i_patt_inst, i_ref_code, i_ref_param);
            // Create the offset
            this._offset = new NyARRectOffset();
            this._offset.setSquare(i_marker_width);
            return;
        }
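A hedged wiring sketch for the dependency-injected overload, built only from concrete types that already appear in these examples; the constructor arguments (pattern resolution, threshold) and the assumption that these types satisfy the interface parameters are illustrative, not verified here.

        // Sketch only: argument values are examples, not library defaults.
        protected void SetupInjected(NyARParam i_param, NyARCode i_code, double i_marker_width, int i_raster_type)
        {
            var patt     = new NyARColorPatt_Perspective_O2(i_code.getWidth(), i_code.getHeight(), 4, 25);
            var sqdetect = new NyARSquareContourDetector_Rle(i_param.getScreenSize());
            var transmat = new NyARTransMat(i_param);
            var filter   = new NyARRasterFilter_ARToolkitThreshold(110, i_raster_type);
            this.initInstance(patt, sqdetect, transmat, filter, i_param, i_code, i_marker_width);
        }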
Example #5
        /**
         * This function detects squares in the raster and reports each one through the
         * callback handler {@link #onSquareDetect}.
         * Implementing classes should perform the square detection and notify the results.
         * @param i_raster
         * The source raster image.
         * @
         */
        public void detectMarker(NyARBinRaster i_raster, NyARSquareContourDetector.CbHandler i_cb)
        {
            NyARLabelingImage limage = this._limage;

            // Stop here if no labels were found
            int label_num = this._labeling.labeling(i_raster, this._limage);

            if (label_num < 1)
            {
                return;
            }

            NyARLabelingLabelStack stack = limage.getLabelStack();

            // Sort the labels by area
            stack.sortByArea();
            //
            NyARLabelingLabel[] labels = stack.getArray();

            // Skip over labels that are too large
            int i;

            for (i = 0; i < label_num; i++)
            {
                // Ignore labels until their size falls within the inspection range
                if (labels[i].area <= AR_AREA_MAX)
                {
                    break;
                }
            }
            int xsize = this._width;
            int ysize = this._height;
            NyARIntCoordinates coord = this._coord;

            int[] mkvertex = this.__detectMarker_mkvertex;

            NyARLabelOverlapChecker<NyARLabelingLabel> overlap = this._overlap_checker;

            // Set the maximum number of labels for the overlap checker
            overlap.setMaxLabels(label_num);
            for (; i < label_num; i++)
            {
                NyARLabelingLabel label_pt = labels[i];
                int label_area             = label_pt.area;
                // Stop once the label drops below the minimum inspection size
                if (label_area < AR_AREA_MIN)
                {
                    break;
                }
                // Exclude labels whose clip region touches the screen border
                if (label_pt.clip_l == 1 || label_pt.clip_r == xsize - 2)
                {// if(wclip[i*4+0] == 1 || wclip[i*4+1] ==xsize-2){
                    continue;
                }
                if (label_pt.clip_t == 1 || label_pt.clip_b == ysize - 2)
                {// if( wclip[i*4+2] == 1 || wclip[i*4+3] ==ysize-2){
                    continue;
                }
                // Check for overlap with already-detected squares
                if (!overlap.check(label_pt))
                {
                    // Overlaps an existing square; skip it.
                    continue;
                }
                // Extract the contour
                if (!this._cpickup.getContour(limage, limage.getTopClipTangentX(label_pt), label_pt.clip_t, coord))
                {
                    continue;
                }
                // Check the contour to decide whether it is a square; if so, store the vertex indexes in mkvertex
                if (!this._coord2vertex.getVertexIndexes(coord, label_area, mkvertex))
                {
                    // Could not obtain the vertices
                    continue;
                }
                // Notify the callback that a square was found
                i_cb.detectMarkerCallback(coord, mkvertex);

                // Add the label of the detected square to the overlap checker
                overlap.push(label_pt);
            }
            return;
        }
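A minimal callback sketch for this API, assuming NyARSquareContourDetector.CbHandler is an interface that exposes detectMarkerCallback exactly as it is invoked above; the handler and variable names are hypothetical.

        // Sketch only: counts the candidate squares reported by detectMarker.
        class CountingHandler : NyARSquareContourDetector.CbHandler
        {
            public int Squares;
            public void detectMarkerCallback(NyARIntCoordinates i_coord, int[] i_vertex_index)
            {
                // One call per square whose contour and four vertices were recovered.
                this.Squares++;
            }
        }
        // Usage (detector and bin_raster prepared elsewhere):
        //   var handler = new CountingHandler();
        //   detector.detectMarker(bin_raster, handler);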
Example #6
        /// <summary>
        /// Initializes the detector for marker detection.
        /// </summary>
        /// <param name="width">The width of the buffer that will be used for detection.</param>
        /// <param name="height">The height of the buffer that will be used for detection.</param>
        /// <param name="nearPlane">The near view plane of the frustum.</param>
        /// <param name="farPlane">The far view plane of the frustum.</param>
        /// <param name="markers">A list of markers that should be detected.</param>
        /// <param name="bufferType">The type of the buffer.</param>
        /// <param name="adaptive">Performs an adaptive bitmap thresholding if set to true. Default = false.</param>
        protected async Task Initialize(int width, int height, double nearPlane, double farPlane, IList <Marker> markers, int bufferType, bool adaptive = false)
        {
            // Check arguments
            if (markers == null || !markers.Any())
            {
                throw new ArgumentNullException("markers");
            }

            // Member init
            this.bufferWidth  = width;
            this.bufferHeight = height;
            this.isAdaptive   = adaptive;

            // Init pattern matchers with markers and check segment size, which has to be equal for all markers
            int segmentX        = markers[0].SegmentsX;
            int segmentY        = markers[0].SegmentsY;
            var patternMatchers = new List <PatternMatcher>(markers.Count);

            foreach (var marker in markers)
            {
                if (marker.SegmentsX != segmentX || marker.SegmentsY != segmentY)
                {
                    throw new ArgumentException("The segment size has to be equal for all markers; for example, don't mix 16x16 and 32x32 markers.", "markers");
                }
                patternMatchers.Add(new PatternMatcher(marker));
            }

            //// Load default camera calibration data
            //var asmName = new System.Reflection.AssemblyName(System.Reflection.Assembly.GetExecutingAssembly().FullName).Name;

            //var streamResInfoCam = Application.GetResourceStream(new Uri(asmName + ";component/data/Camera_Calibration_Default.dat", UriKind.Relative));
            //var cameraParameters = new NyARParam();
            //using (var cameraCalibrationDataStream = streamResInfoCam.Stream)
            //{
            //   cameraParameters.loadARParam(cameraCalibrationDataStream);
            //   cameraParameters.changeScreenSize(width, height);
            //}

            var file = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///UWPARToolkit/data/Camera_Calibration_Default.dat"));

            var cameraParameters = new NyARParam();

            using (var s = await file.OpenReadAsync())
            {
                var instr = s.AsStream();
                cameraParameters.loadARParam(instr);
                cameraParameters.changeScreenSize(width, height);
            }

            // Get projection matrix from camera calibration data
            this.Projection = cameraParameters.GetCameraFrustumRH(nearPlane, farPlane);

            // Init detector and necessary data
            var colorPattern = new NyARColorPatt_Perspective_O2(segmentX, segmentY, 4, 25);
            var patternMatchDeviationData = new NyARMatchPattDeviationColorData(segmentX, segmentY);

            this.squareDetector          = new NyARSquareContourDetector_Rle(cameraParameters.getScreenSize());
            this.squareDetectionListener = new SquareDetectionListener(patternMatchers, cameraParameters, colorPattern, patternMatchDeviationData);

            // Init buffer members
            this.filteredBuffer = new NyARBinRaster(width, height);
            if (adaptive)
            {
                this.bufferFilter = new NyARRasterFilter_AdaptiveThreshold(bufferType);
            }
            else
            {
                this.bufferFilter = new NyARRasterFilter_ARToolkitThreshold(this.Threshold, bufferType);
            }
        }
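A hedged call sketch for the async overload, assumed to be made from a class deriving from the one shown; the 1280x720 buffer size and the 0.1..10000.0 clip planes are example values, and markers/bufferType come from the host application.

        // Sketch only: wrapper method and its parameters are hypothetical.
        public async Task InitForCameraAsync(IList<Marker> markers, int bufferType)
        {
            // Example dimensions and frustum range; substitute the real capture settings.
            await this.Initialize(1280, 720, 0.1, 10000.0, markers, bufferType, adaptive: false);
        }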
        /// <summary>
        /// Initializes the detector for marker detection.
        /// </summary>
        /// <param name="width">The width of the buffer that will be used for detection.</param>
        /// <param name="height">The height of the buffer that will be used for detection.</param>
        /// <param name="nearPlane">The near view plane of the frustum.</param>
        /// <param name="farPlane">The far view plane of the frustum.</param>
        /// <param name="markers">A list of markers that should be detected.</param>
        /// <param name="bufferType">The type of the buffer.</param>
        /// <param name="adaptive">Performs an adaptive bitmap thresholding if set to true. Default = false.</param>
        protected void Initialize(int width, int height, double nearPlane, double farPlane, IList <Marker> markers, int bufferType, bool adaptive = false)
        {
            // Check arguments
            if (markers == null || !markers.Any())
            {
                throw new ArgumentNullException("markers");
            }

            // Member init
            this.bufferWidth  = width;
            this.bufferHeight = height;
            this.isAdaptive   = adaptive;

            // Init pattern matchers with markers and check segment size, which has to be equal for all markers
            int segmentX        = markers[0].SegmentsX;
            int segmentY        = markers[0].SegmentsY;
            var patternMatchers = new List <PatternMatcher>(markers.Count);

            foreach (var marker in markers)
            {
                if (marker.SegmentsX != segmentX || marker.SegmentsY != segmentY)
                {
                    throw new ArgumentException("The segment size has to be equal for all markers; for example, don't mix 16x16 and 32x32 markers.", "markers");
                }
                patternMatchers.Add(new PatternMatcher(marker));
            }

            // Load default camera calibration data
            string location         = System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location);
            string calibrationPath  = System.IO.Path.Combine(location, "Content", "Data", "Camera_Calibration_1280x720.dat");
            var    cameraParameters = new NyARParam();

            using (var cameraCalibrationDataStream = System.IO.File.OpenRead(calibrationPath))
            {
                cameraParameters.loadARParam(cameraCalibrationDataStream);
                cameraParameters.changeScreenSize(width, height);
            }

            //var asmName = new System.Reflection.AssemblyName(System.Reflection.Assembly.GetExecutingAssembly().FullName).Name;

            //var uri = new Uri(asmName + ";component/Assets/data/Camera_Calibration_1280x720.dat", UriKind.Relative);
            //var streamResInfoCam = Application.GetResourceStream(uri);
            //if (null == streamResInfoCam)
            //    throw new FileNotFoundException("Application.GetResourceStream returned null", uri.OriginalString);

            //var cameraParameters = new NyARParam();
            //using (var cameraCalibrationDataStream = streamResInfoCam.Stream)
            //{
            //    cameraParameters.loadARParam(cameraCalibrationDataStream);
            //    cameraParameters.changeScreenSize(width, height);
            //}

            // Get projection matrix from camera calibration data
            this.Projection = cameraParameters.GetCameraFrustumRH(nearPlane, farPlane);

            // Init detector and necessary data
            var colorPattern = new NyARColorPatt_Perspective_O2(segmentX, segmentY, 4, 25);
            var patternMatchDeviationData = new NyARMatchPattDeviationColorData(segmentX, segmentY);

            this.squareDetector          = new NyARSquareContourDetector_Rle(cameraParameters.getScreenSize());
            this.squareDetectionListener = new SquareDetectionListener(patternMatchers, cameraParameters, colorPattern, patternMatchDeviationData);

            // Init buffer members
            this.filteredBuffer = new NyARBinRaster(width, height);
            if (adaptive)
            {
                this.bufferFilter = new NyARRasterFilter_AdaptiveThreshold(bufferType);
            }
            else
            {
                this.bufferFilter = new NyARRasterFilter_ARToolkitThreshold(this.Threshold, bufferType);
            }
        }
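The synchronous overload would be invoked the same way at startup; again the dimensions and clip planes below are placeholders, and the wrapper method is hypothetical.

        // Sketch only: enables adaptive thresholding, e.g. for uneven lighting.
        protected void InitDetector(IList<Marker> markers, int bufferType)
        {
            this.Initialize(640, 480, 0.1, 10000.0, markers, bufferType, adaptive: true);
        }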