public Form1()
{
    InitializeComponent();
    // AR setup
    // Load the AR camera parameter file
    NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(AR_CAMERA_FILE));
    ap.changeScreenSize(320, 240);
    // Load the AR pattern code
    NyARCode code = NyARCode.createFromARPattFile(new StreamReader(AR_CODE_FILE), 16, 16);
    NyARDoubleMatrix44 result_mat = new NyARDoubleMatrix44();
    // Set the computation mode
    // Create the capture device
    /**************************************************
     This code uses device 0 (the first capture device found).
     On systems with multiple capture devices it may not work as expected.
     To use the n-th device, change the 0 in CaptureDevice cap=cl[0];
     see SimpleLiteDirect3D for how to let the user select a device manually.
    **************************************************/
    CaptureDeviceList cl = new CaptureDeviceList();
    CaptureDevice cap = cl[0];
    cap.SetCaptureListener(this);
    cap.PrepareCapture(320, 240, 30);
    this.m_cap = cap;
    // Create the raster.
    this.m_raster = new DsRgbRaster(cap.video_width, cap.video_height, NyARBufferType.OBJECT_CS_Bitmap);
    // Create a detector that tracks a single pattern
    this.m_ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0);
    this.m_ar.setContinueMode(false);
}
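// Hedged sketch (not part of the original sample): selecting the n-th capture device instead
// of hard-coding index 0, as the comment above suggests. Assumes CaptureDeviceList exposes a
// count property alongside the indexer used above; the method name is hypothetical.
private CaptureDevice SelectCaptureDevice(int device_index)
{
    CaptureDeviceList cl = new CaptureDeviceList();
    if (device_index < 0 || device_index >= cl.count)
    {
        throw new ApplicationException("The requested capture device was not found.");
    }
    return cl[device_index];
}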
public SimpleLiteD3d(NyARToolkitCS topLevelForm, ResourceBuilder i_resource)
{
    NyMath.initialize();
    this._capture = i_resource.createWmCapture();
    this._capture.setOnSample(this);
    this._d3dmgr = i_resource.createD3dManager(topLevelForm);
    this._back_ground = i_resource.createBackGround(this._d3dmgr);
    // Prepare the cube (8 vertices)
    this._d3dcube = new ColorCube(this._d3dmgr.d3d_device, 40);
    // Load the AR pattern code
    NyARCode code = i_resource.createNyARCode();
    // Create the AR raster (DirectShow capture format).
    this.m_raster = i_resource.createARRaster();
    // Create a detector that tracks a single pattern
    this.m_ar = new NyARSingleDetectMarker(i_resource.ar_param, code, 80.0, this.m_raster.getBufferType());
    // Set the computation mode
    this.m_ar.setContinueMode(false);
    return;
}
public void Test_arDetectMarkerLite()
{
    Assembly assembly = Assembly.GetExecutingAssembly();
    // Load the AR camera parameter file
    NyARParam ap = new NyARParam();
    ap.loadARParam(assembly.GetManifestResourceStream(RES_CAMERA));
    ap.changeScreenSize(320, 240);
    // Load the AR pattern code
    NyARCode code = new NyARCode(16, 16);
    Stream sr1 = assembly.GetManifestResourceStream(RES_PATT);
    code.loadARPatt(new StreamReader(sr1));
    // Load the test image (320x240 BGRA raw data)
    StreamReader sr = new StreamReader(assembly.GetManifestResourceStream(RES_DATA));
    BinaryReader bs = new BinaryReader(sr.BaseStream);
    byte[] raw = bs.ReadBytes(320 * 240 * 4);
    NyARRgbRaster_BGRA ra = new NyARRgbRaster_BGRA(320, 240, false);
    ra.wrapBuffer(raw);
    // Blank_Raster ra = new Blank_Raster(320, 240);
    // Create a detector that tracks a single pattern
    // NyARSingleDetectMarker_Quad ar = new NyARSingleDetectMarker_Quad(ap, code, 80.0);
    NyARSingleDetectMarker ar = new NyARSingleDetectMarker(ap, code, 80.0, ra.getBufferType());
    NyARTransMatResult result_mat = new NyARTransMatResult();
    ar.setContinueMode(false);
    ar.detectMarkerLite(ra, 100);
    ar.getTransmationMatrix(result_mat);
    // Detect the marker: time 10 batches of 10 detections each
    for (int i3 = 0; i3 < 10; i3++)
    {
        Stopwatch sw = new Stopwatch();
        sw.Start();
        for (int i = 0; i < 10; i++)
        {
            // Get the transform matrix
            ar.detectMarkerLite(ra, 100);
            ar.getTransmationMatrix(result_mat);
        }
        sw.Stop();
        Debug.WriteLine(sw.ElapsedMilliseconds + "[ms]");
    }
    return;
}
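// Hedged sketch (not part of the original test): detectMarkerLite returns whether a marker
// was found, so non-benchmark code would normally guard the transform read on that result,
// as the InitializeApplication samples below do. Uses the ar/ra/result_mat locals from above.
bool found = ar.detectMarkerLite(ra, 100);
if (found)
{
    ar.getTransmationMatrix(result_mat);
}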
public bool InitializeApplication(Form1 topLevelForm, CaptureDevice i_cap_device)
{
    topLevelForm.ClientSize = new Size(SCREEN_WIDTH, SCREEN_HEIGHT);
    // Create the capture (QVGA at 30 fps)
    i_cap_device.SetCaptureListener(this);
    i_cap_device.PrepareCapture(SCREEN_WIDTH, SCREEN_HEIGHT, 30);
    this._cap = i_cap_device;
    // AR setup
    // Create the AR raster (DirectShow capture format).
    this._raster = new DsRgbRaster(i_cap_device.video_width, i_cap_device.video_height, NyARBufferType.BYTE1D_B8G8R8X8_32);
    // Load and apply the AR camera parameter file
    NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(AR_CAMERA_FILE));
    ap.changeScreenSize(SCREEN_WIDTH, SCREEN_HEIGHT);
    // Load the AR pattern code
    NyARCode code = NyARCode.createFromARPattFile(new StreamReader(AR_CODE_FILE), 16, 16);
    // Create a detector that tracks a single pattern
    this._ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0, NyARSingleDetectMarker.PF_NYARTOOLKIT);
    // Set the computation mode
    this._ar.setContinueMode(true);
    // Prepare the 3D device
    this._device = PrepareD3dDevice(topLevelForm);
    this._device.RenderState.ZBufferEnable = true;
    this._device.RenderState.Lighting = false;
    // Set the camera projection
    Matrix tmp = new Matrix();
    NyARD3dUtil.toCameraFrustumRH(ap.getPerspectiveProjectionMatrix(), ap.getScreenSize(), 1, 10, 10000, ref tmp);
    this._device.Transform.Projection = tmp;
    // Set the view transform (as a left-handed view matrix):
    // from (0,0,0), looking toward +Z, with +Y as up.
    this._device.Transform.View = Matrix.LookAtLH(
        new Vector3(0.0f, 0.0f, 0.0f),
        new Vector3(0.0f, 0.0f, 1.0f),
        new Vector3(0.0f, 1.0f, 0.0f));
    Viewport vp = new Viewport();
    vp.X = 0;
    vp.Y = 0;
    vp.Height = ap.getScreenSize().h;
    vp.Width = ap.getScreenSize().w;
    vp.MaxZ = 1.0f;
    // Apply the viewport
    this._device.Viewport = vp;
    // Drawing instance for the color cube
    this._cube = new ColorCube(this._device, 40);
    // Create the background surface
    this._surface = new NyARD3dSurface(this._device, SCREEN_WIDTH, SCREEN_HEIGHT);
    this._is_marker_enable = false;
    return true;
}
private void buttonBookDemo_Click(object sender, EventArgs e)
{
    if (bookDemo == false)
    {
        StopOtherApps(this, e);
        bookDemo = true;
        labelDemoName.Text = "Book";
        buttonBookDemo.Text = "Stop Book";
        labelDemoInstructions.Enabled = true;
        labelDemoInstructions.Text = "Book Demo Instructions:\n\n" + "Book \n";
        //pictureBoxAlbum.Show();
        lblResult.Hide();
        // NYAR: initialize NyAR components.
        NyARParam ap = new NyARParam();
        ap.loadARParamFromFile(AR_CAMERA_FILE);
        ap.changeScreenSize(SCREEN_WIDTH, SCREEN_HEIGHT);
        _raster = new DsBGRX32Raster(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_WIDTH * 32 / 8);
        _utils = new NyARD3dUtil();
        // One detector for each pattern
        NyARCode code1 = new NyARCode(16, 16);
        code1.loadARPattFromFile(AR_CODE_FILE1);
        _ar1 = new NyARSingleDetectMarker(ap, code1, 80.0);
        _ar1.setContinueMode(false);
        NyARCode code2 = new NyARCode(16, 16);
        code2.loadARPattFromFile(AR_CODE_FILE2);
        _ar2 = new NyARSingleDetectMarker(ap, code2, 80.0);
        _ar2.setContinueMode(false);
        NyARCode code3 = new NyARCode(16, 16);
        code3.loadARPattFromFile(AR_CODE_FILE3);
        _ar3 = new NyARSingleDetectMarker(ap, code3, 80.0);
        _ar3.setContinueMode(false);
        NyARCode code4 = new NyARCode(16, 16);
        code4.loadARPattFromFile(AR_CODE_FILE4);
        _ar4 = new NyARSingleDetectMarker(ap, code4, 80.0);
        _ar4.setContinueMode(false);
    }
    else
    {
        bookDemo = false;
        labelDemoName.Text = "WUW";
        buttonBookDemo.Text = "Book";
        Cursor = Cursors.Arrow;
        labelDemoInstructions.Enabled = false;
        labelDemoInstructions.Text = "";
        ResetEnvironment();
        //pictureBoxAlbum.Hide();
        lblResult.Show();
    }
}
private void nyar()
{
    // Load the latest frame into a bitmap.
    Bitmap _latestFrameBitmap = (Bitmap)_latestFrame;
    // Create a new bitmap with a different pixel format: PixelFormat.Format32bppArgb.
    Bitmap _latestFrameShift = new Bitmap(_latestFrameBitmap.Width, _latestFrameBitmap.Height, PixelFormat.Format32bppArgb);
    _latestFrameShift.SetResolution(_latestFrameBitmap.HorizontalResolution, _latestFrameBitmap.VerticalResolution);
    // Copy the data from the first bitmap to the second.
    Graphics g = Graphics.FromImage(_latestFrameShift);
    g.DrawImage(_latestFrameBitmap, 0, 0);
    g.Dispose();
    // Turn the bitmap into an IntPtr.
    Rectangle _latestFrameShiftRect = new Rectangle(0, 0, _latestFrameShift.Width, _latestFrameShift.Height);
    BitmapData _latestFrameShiftData = _latestFrameShift.LockBits(_latestFrameShiftRect, ImageLockMode.ReadWrite, _latestFrameShift.PixelFormat);
    IntPtr fakeBuffer = _latestFrameShiftData.Scan0;
    _latestFrameShift.UnlockBits(_latestFrameShiftData);
    // Use the buffer as the raster's backing store.
    _raster.setBuffer(fakeBuffer);
    // Begin detection: try all four detectors.
    _ar1.detectMarkerLite(_raster, 110);
    _ar2.detectMarkerLite(_raster, 110);
    _ar3.detectMarkerLite(_raster, 110);
    _ar4.detectMarkerLite(_raster, 110);
    NyARSingleDetectMarker[] _arArray = new NyARSingleDetectMarker[4] { _ar1, _ar2, _ar3, _ar4 };
    _arFinal = largestNyar(_arArray);
    is_marker_enable = _arFinal.detectMarkerLite(_raster, 110);
    if (is_marker_enable && _arFinal.getConfidence() > 0.3)
    {
        labelDemoName.Text = "Pattern #" + largestNyarIndex(_arArray) + "[" + _arFinal.getConfidence().ToString() + "]";
    }
    else
    {
        labelDemoName.Text = "No Pattern";
    }
    // Display some feedback.
    pictureBoxAlbum.Image = _latestFrameShift;
}
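// Hedged sketch (not in the original): Scan0 is only guaranteed to stay valid while the
// bitmap remains locked, so a safer ordering keeps the LockBits lock held until detection
// has finished. Uses the same fields and detectors as nyar() above.
BitmapData data = _latestFrameShift.LockBits(_latestFrameShiftRect, ImageLockMode.ReadOnly, _latestFrameShift.PixelFormat);
try
{
    _raster.setBuffer(data.Scan0);
    _ar1.detectMarkerLite(_raster, 110);
    _ar2.detectMarkerLite(_raster, 110);
    _ar3.detectMarkerLite(_raster, 110);
    _ar4.detectMarkerLite(_raster, 110);
}
finally
{
    _latestFrameShift.UnlockBits(data);
}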
private int largestNyarIndex(NyARSingleDetectMarker[] nyarArray)
{
    // Return the index of the detector with the highest confidence.
    int maxIndex = 0;
    double maxConfidence = 0;
    for (int i = 0; i < nyarArray.Length; i++)
    {
        if (nyarArray[i].getConfidence() > maxConfidence)
        {
            maxConfidence = nyarArray[i].getConfidence();
            maxIndex = i;
        }
    }
    return maxIndex;
}
private NyARSingleDetectMarker largestNyar(NyARSingleDetectMarker[] nyarArray)
{
    // Return the detector with the highest confidence.
    NyARSingleDetectMarker maxNyar = nyarArray[0];
    double maxConfidence = 0;
    for (int i = 0; i < nyarArray.Length; i++)
    {
        if (nyarArray[i].getConfidence() > maxConfidence)
        {
            maxConfidence = nyarArray[i].getConfidence();
            maxNyar = nyarArray[i];
        }
    }
    return maxNyar;
}
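// Hedged sketch (not in the original): largestNyarIndex and largestNyar scan the same array
// with identical logic, so a single index-returning pass avoids the duplication; the method
// name is hypothetical, and callers can use nyarArray[bestIndex] to get the detector itself.
private int bestDetectorIndex(NyARSingleDetectMarker[] nyarArray)
{
    int maxIndex = 0;
    double maxConfidence = nyarArray[0].getConfidence();
    for (int i = 1; i < nyarArray.Length; i++)
    {
        double c = nyarArray[i].getConfidence();
        if (c > maxConfidence)
        {
            maxConfidence = c;
            maxIndex = i;
        }
    }
    return maxIndex;
}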
public RleDetector(NyARSingleDetectMarker i_parent, NyARIntSize i_size)
    : base(i_size)
{
    this._parent = i_parent;
}
public bool InitializeApplication(Form1 topLevelForm)
{
    topLevelForm.ClientSize = new Size(SCREEN_WIDTH, SCREEN_HEIGHT);
    this._raster = new NyARBitmapRaster(new Bitmap(TEST_IMAGE));
    // Load and apply the AR camera parameter file
    NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(AR_CAMERA_FILE));
    ap.changeScreenSize(SCREEN_WIDTH, SCREEN_HEIGHT);
    // Load the AR pattern code
    NyARCode code = NyARCode.createFromARPattFile(new StreamReader(AR_CODE_FILE), 16, 16);
    // Create a detector that tracks a single pattern
    this._ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0, NyARSingleDetectMarker.PF_NYARTOOLKIT);
    // Set the computation mode
    this._ar.setContinueMode(true);
    // Prepare the 3D device
    this._device = PrepareD3dDevice(topLevelForm);
    this._device.RenderState.ZBufferEnable = true;
    this._device.RenderState.Lighting = false;
    // Set the camera projection
    Matrix tmp = new Matrix();
    NyARD3dUtil.toCameraFrustumRH(ap, 10, 1000, ref tmp);
    this._device.Transform.Projection = tmp;
    // Set the view transform (as a left-handed view matrix):
    // from (0,0,0), looking toward +Z, with +Y as up.
    this._device.Transform.View = Matrix.LookAtLH(
        new Vector3(0.0f, 0.0f, 0.0f),
        new Vector3(0.0f, 0.0f, 1.0f),
        new Vector3(0.0f, 1.0f, 0.0f));
    Viewport vp = new Viewport();
    vp.X = 0;
    vp.Y = 0;
    vp.Height = ap.getScreenSize().h;
    vp.Width = ap.getScreenSize().w;
    vp.MaxZ = 1.0f;
    // Apply the viewport
    this._device.Viewport = vp;
    // Drawing instance for the color cube
    this._cube = new ColorCube(this._device, 40);
    // Create the background surface
    this._surface = new NyARD3dSurface(this._device, SCREEN_WIDTH, SCREEN_HEIGHT);
    NyARDoubleMatrix44 nyar_transmat = this.__OnBuffer_nyar_transmat;
    // Detect the marker
    bool is_marker_enable = this._ar.detectMarkerLite(this._raster, 110);
    if (is_marker_enable)
    {
        // If found, compute the matrix
        this._ar.getTransmationMatrix(nyar_transmat);
        NyARD3dUtil.toD3dCameraView(nyar_transmat, 1f, ref this._trans_mat);
    }
    this._is_marker_enable = is_marker_enable;
    // Copy the background onto the surface
    this._surface.setRaster(this._raster);
    return true;
}
public bool InitializeApplication(Form1 topLevelForm, CaptureDevice i_cap_device)
{
    topLevelForm.ClientSize = new Size(SCREEN_WIDTH, SCREEN_HEIGHT);
    i_cap_device.SetCaptureListener(this);
    i_cap_device.PrepareCapture(SCREEN_WIDTH, SCREEN_HEIGHT, 30);
    this._cap = i_cap_device;
    //this._raster = new DsRgbRaster(i_cap_device.video_width, i_cap_device.video_height, NyARBufferType.BYTE1D_B8G8R8X8_32);
    #region my code
    try
    {
        byte[] bimg = service.getb();
        //if (bimg != null)
        {
            Image img = byteToImage(bimg);
            if (img != null)
            {
                //frm.textBox1.Text = img.ToString();
                this._raster = new NyARBitmapRaster((Bitmap)img);
            }
        }
        //else
    }
    catch (Exception x)
    {
        // Exception intentionally ignored; the debug MessageBox is left commented out.
        //MessageBox.Show(x.ToString());
    }
    #endregion
    NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(AR_CAMERA_FILE));
    ap.changeScreenSize(SCREEN_WIDTH, SCREEN_HEIGHT);
    NyARCode code = NyARCode.createFromARPattFile(new StreamReader(AR_CODE_FILE), 16, 16);
    this._ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0, NyARSingleDetectMarker.PF_NYARTOOLKIT);
    this._ar.setContinueMode(true);
    this._device = PrepareD3dDevice(topLevelForm);
    this._device.RenderState.ZBufferEnable = true;
    this._device.RenderState.Lighting = false;
    Matrix tmp = new Matrix();
    NyARD3dUtil.toCameraFrustumRH(ap.getPerspectiveProjectionMatrix(), ap.getScreenSize(), 1, 10, 10000, ref tmp);
    this._device.Transform.Projection = tmp;
    this._device.Transform.View = Matrix.LookAtLH(
        new Vector3(0.0f, 0.0f, 0.0f),
        new Vector3(0.0f, 0.0f, 1.0f),
        new Vector3(0.0f, 1.0f, 0.0f));
    Viewport vp = new Viewport();
    vp.X = 0;
    vp.Y = 0;
    vp.Height = ap.getScreenSize().h;
    vp.Width = ap.getScreenSize().w;
    vp.MaxZ = 1.0f;
    this._device.Viewport = vp;
    this._cube = new ColorCube(this._device, 40);
    this._surface = new NyARD3dSurface(this._device, SCREEN_WIDTH, SCREEN_HEIGHT);
    this._is_marker_enable = false;
    return true;
}