/// <summary>
/// Smoke test for <c>MarkerProcessor</c>: loads camera parameters, wraps a raw
/// 320x240 RGB test image into a raster, runs one detection pass, and dumps the
/// resulting 4x4 transform matrix plus the detected marker id to the console.
/// </summary>
public void Test()
{
    // Load the AR camera parameter file and rescale it to the test image size.
    // The reader is disposed once the factory has consumed it (it was
    // previously leaked).
    NyARParam ap;
    using (StreamReader param_reader = new StreamReader(camera_file))
    {
        ap = NyARParam.createFromARParamFile(param_reader);
    }
    ap.changeScreenSize(320, 240);

    // Read the test image (320x240 RGB raw data). Dispose the readers
    // deterministically instead of leaking them.
    byte[] raw;
    using (StreamReader sr = new StreamReader(data_file))
    using (BinaryReader bs = new BinaryReader(sr.BaseStream))
    {
        raw = bs.ReadBytes(320 * 240 * 3);
    }

    // Wrap the raw bytes into an RGB raster without copying (is_alloc=false).
    NyARRgbRaster ra = new NyARRgbRaster(320, 240, NyARBufferType.BYTE1D_R8G8B8_24, false);
    ra.wrapBuffer(raw);

    // Run one detection pass and dump the transform row by row, then the id.
    MarkerProcessor pr = new MarkerProcessor(ap, ra.getBufferType());
    pr.detectMarker(ra);
    Console.WriteLine(pr.transmat.m00 + "," + pr.transmat.m01 + "," + pr.transmat.m02 + "," + pr.transmat.m03);
    Console.WriteLine(pr.transmat.m10 + "," + pr.transmat.m11 + "," + pr.transmat.m12 + "," + pr.transmat.m13);
    Console.WriteLine(pr.transmat.m20 + "," + pr.transmat.m21 + "," + pr.transmat.m22 + "," + pr.transmat.m23);
    Console.WriteLine(pr.transmat.m30 + "," + pr.transmat.m31 + "," + pr.transmat.m32 + "," + pr.transmat.m33);
    Console.WriteLine(pr.current_id);
    return;
}
/// <summary>
/// Form constructor: performs AR setup (camera parameters, pattern code),
/// prepares capture device 0 at 320x240/30fps, and builds the raster and the
/// single-marker detector used by the capture callback.
/// </summary>
public Form1()
{
    InitializeComponent();
    // --- AR setup ---
    // Load the AR camera parameter file.
    NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(AR_CAMERA_FILE));
    ap.changeScreenSize(320, 240);
    // Load the AR pattern code (16x16 resolution).
    NyARCode code = NyARCode.createFromARPattFile(new StreamReader(AR_CODE_FILE), 16, 16);
    // NOTE(review): result_mat appears unused in this constructor.
    NyARDoubleMatrix44 result_mat = new NyARDoubleMatrix44();
    // Set the computation mode.
    // Create the capture device.
    /**************************************************
     * This code is set up to use capture device 0 (the first capture
     * device found). On systems with multiple capture devices it may
     * not work as expected.
     * To use the n-th device, change the 0 in "CaptureDevice cap = cl[0];".
     * See SimpleLiteDirect3D for how to let the user select a device manually.
     **************************************************/
    CaptureDeviceList cl = new CaptureDeviceList();
    CaptureDevice cap = cl[0];
    // Register the listener before starting capture so no frames are missed.
    cap.SetCaptureListener(this);
    cap.PrepareCapture(320, 240, 30);
    this.m_cap = cap;
    // Create the raster that receives captured frames.
    this.m_raster = new DsRgbRaster(cap.video_width, cap.video_height, NyARBufferType.OBJECT_CS_Bitmap);
    // Create the class that tracks a single marker pattern
    // (marker width 80 -- presumably millimeters; TODO confirm units).
    this.m_ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0);
    this.m_ar.setContinueMode(false);
}
/// <summary>
/// Initializes the application: sizes the form, prepares the capture device
/// (QVGA, 30fps), loads camera parameters, builds the NyId marker library and
/// the Reality engine, and configures the Direct3D device (projection, view,
/// viewport, cube, background surface).
/// </summary>
/// <param name="topLevelForm">Form to size and attach the D3D device to.</param>
/// <param name="i_cap_device">Capture device to configure and use.</param>
/// <returns>Always true; D3D/capture failures surface as exceptions.</returns>
public bool InitializeApplication(Form1 topLevelForm, CaptureDevice i_cap_device)
{
    topLevelForm.ClientSize = new Size(SCREEN_WIDTH, SCREEN_HEIGHT);
    // Prepare capture (QVGA at 30fps); register the listener first.
    i_cap_device.SetCaptureListener(this);
    i_cap_device.PrepareCapture(SCREEN_WIDTH, SCREEN_HEIGHT, 30);
    this._cap = i_cap_device;

    // Load and configure the AR camera parameter file. The reader is disposed
    // once the factory has consumed it (it was previously leaked).
    NyARParam ap;
    using (StreamReader param_reader = new StreamReader(AR_CAMERA_FILE))
    {
        ap = NyARParam.createFromARParamFile(param_reader);
    }
    ap.changeScreenSize(SCREEN_WIDTH, SCREEN_HEIGHT);

    // Marker library (NyId); every id maps to a single 80-unit-wide marker.
    this._mklib = new RawbitSerialIdTable(10);
    this._mklib.addAnyItem("any id", 80);

    // Prepare the Reality engine and its DirectShow source.
    this._reality = new NyARRealityD3d(ap, 10, 10000, 2, 10);
    this._reality_source = new NyARRealitySource_DShow(SCREEN_WIDTH, SCREEN_HEIGHT, null, 2, 100);

    // Prepare the 3D device.
    this._device = PrepareD3dDevice(topLevelForm);
    this._device.RenderState.ZBufferEnable = true;
    this._device.RenderState.Lighting = false;

    // Camera projection taken from the Reality frustum.
    Matrix tmp = new Matrix();
    this._reality.getD3dCameraFrustum(ref tmp);
    this._device.Transform.Projection = tmp;

    // View transform (left-handed coordinate view matrix):
    // eye at the origin looking down +Z, Y axis up.
    this._device.Transform.View = Matrix.LookAtLH(
        new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 1.0f), new Vector3(0.0f, 1.0f, 0.0f));

    // Viewport covering the whole screen. vp.X was previously left at its
    // implicit default; set it explicitly for consistency with the sibling samples.
    Viewport vp = new Viewport();
    vp.X = 0;
    vp.Y = 0;
    vp.Height = ap.getScreenSize().h;
    vp.Width = ap.getScreenSize().w;
    vp.MaxZ = 1.0f;
    this._device.Viewport = vp;

    // Color-cube drawing instance (edge length 40) and background surface.
    this._cube = new ColorCube(this._device, 40);
    this._surface = new NyARD3dSurface(this._device, SCREEN_WIDTH, SCREEN_HEIGHT);
    return true;
}
/// <summary>
/// Benchmark for <c>NyARSingleDetectMarker</c>: loads camera parameters and a
/// 16x16 pattern, wraps a raw 320x240 BGRA test image, then times 1000
/// detect+transform iterations and prints the final matrix and elapsed time.
/// </summary>
public void Test()
{
    // Load the AR camera parameter file and rescale to the test image size.
    // Readers are disposed deterministically (they were previously leaked).
    NyARParam ap;
    using (StreamReader param_reader = new StreamReader(camera_file))
    {
        ap = NyARParam.createFromARParamFile(param_reader);
    }
    ap.changeScreenSize(320, 240);

    // Load the AR pattern code (16x16 resolution).
    NyARCode code;
    using (StreamReader code_reader = new StreamReader(code_file))
    {
        code = NyARCode.createFromARPattFile(code_reader, 16, 16);
    }

    // Read the test image (320x240 BGRA raw data).
    byte[] raw;
    using (StreamReader sr = new StreamReader(data_file))
    using (BinaryReader bs = new BinaryReader(sr.BaseStream))
    {
        raw = bs.ReadBytes(320 * 240 * 4);
    }
    // Wrap the raw bytes into a raster without copying (is_alloc=false).
    NyARRgbRaster ra = new NyARRgbRaster(320, 240, NyARBufferType.BYTE1D_B8G8R8X8_32, false);
    ra.wrapBuffer(raw);

    // Detector that tracks a single marker pattern, non-continuous mode.
    NyARSingleDetectMarker ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0, NyARSingleDetectMarker.PF_NYARTOOLKIT);
    NyARDoubleMatrix44 result_mat = new NyARDoubleMatrix44();
    ar.setContinueMode(false);
    // Warm-up pass, kept outside the timed loop.
    ar.detectMarkerLite(ra, 100);
    ar.getTransmationMatrix(result_mat);

    // Timed loop: detect the marker and fetch its transform 1000 times.
    Stopwatch sw = new Stopwatch();
    sw.Start();
    for (int i = 0; i < 1000; i++)
    {
        ar.detectMarkerLite(ra, 100);
        ar.getTransmationMatrix(result_mat);
    }
    Console.WriteLine(result_mat.m00 + "," + result_mat.m01 + "," + result_mat.m02 + "," + result_mat.m03);
    Console.WriteLine(result_mat.m10 + "," + result_mat.m11 + "," + result_mat.m12 + "," + result_mat.m13);
    Console.WriteLine(result_mat.m20 + "," + result_mat.m21 + "," + result_mat.m22 + "," + result_mat.m23);
    Console.WriteLine(result_mat.m30 + "," + result_mat.m31 + "," + result_mat.m32 + "," + result_mat.m33);
    sw.Stop();
    Console.WriteLine(sw.ElapsedMilliseconds + "[ms]");
    return;
}
/// <summary>
/// Benchmark for <c>NyARReality</c>: loads camera parameters, copies a raw
/// 320x240 BGRA test image into the reality source buffer, times 1000
/// progress() iterations, then promotes the first unknown target to known and
/// dumps its transform matrix. Any failure is reported to the console.
/// </summary>
public void Test()
{
    try
    {
        // Camera parameters rescaled to the 320x240 test image. The reader is
        // disposed once consumed (it was previously leaked).
        NyARParam param;
        using (StreamReader param_reader = new StreamReader(PARAM_FILE))
        {
            param = NyARParam.createFromARParamFile(param_reader);
        }
        param.changeScreenSize(320, 240);

        // Reality engine plus a reference source with a BGRA byte buffer.
        NyARReality reality = new NyARReality(param.getScreenSize(), 10, 1000, param.getPerspectiveProjectionMatrix(), null, 10, 10);
        NyARRealitySource reality_in = new NyARRealitySource_Reference(320, 240, null, 2, 100, NyARBufferType.BYTE1D_B8G8R8X8_32);

        // Read the test image (320x240 BGRA raw data) and copy it into the
        // source buffer. Readers disposed deterministically.
        byte[] raw;
        using (StreamReader sr = new StreamReader(DATA_FILE))
        using (BinaryReader bs = new BinaryReader(sr.BaseStream))
        {
            raw = bs.ReadBytes(320 * 240 * 4);
        }
        Array.Copy(raw, (byte[])reality_in.refRgbSource().getBuffer(), raw.Length);

        // Timed loop: run the reality pipeline 1000 times on the same frame.
        Stopwatch sw = new Stopwatch();
        sw.Start();
        for (int i = 0; i < 1000; i++)
        {
            reality.progress(reality_in);
        }
        sw.Stop();
        Console.WriteLine(sw.ElapsedMilliseconds + "[ms]");
        Console.WriteLine(reality.getNumberOfKnown());
        Console.WriteLine(reality.getNumberOfUnknown());
        Console.WriteLine(reality.getNumberOfDead());

        // Promote the first unknown target to a known target (artk_id=2,
        // size=80) and dump its 4x4 transform row by row.
        NyARRealityTarget[] rt = new NyARRealityTarget[10];
        reality.selectUnKnownTargets(rt);
        reality.changeTargetToKnown(rt[0], 2, 80);
        Console.WriteLine(rt[0]._transform_matrix.m00 + "," + rt[0]._transform_matrix.m01 + "," + rt[0]._transform_matrix.m02 + "," + rt[0]._transform_matrix.m03);
        Console.WriteLine(rt[0]._transform_matrix.m10 + "," + rt[0]._transform_matrix.m11 + "," + rt[0]._transform_matrix.m12 + "," + rt[0]._transform_matrix.m13);
        Console.WriteLine(rt[0]._transform_matrix.m20 + "," + rt[0]._transform_matrix.m21 + "," + rt[0]._transform_matrix.m22 + "," + rt[0]._transform_matrix.m23);
        Console.WriteLine(rt[0]._transform_matrix.m30 + "," + rt[0]._transform_matrix.m31 + "," + rt[0]._transform_matrix.m32 + "," + rt[0]._transform_matrix.m33);
    }
    catch (Exception e)
    {
        // NOTE(review): StackTrace alone omits the exception message; kept
        // as-is to preserve the original console output format.
        Console.WriteLine(e.StackTrace);
    }
    return;
}
/// <summary>
/// One-shot AR setup plus a detection pass: loads camera parameters and the
/// five pattern files, builds the multi-marker detector, configures the D3D
/// projection/view/viewport and background surface, then runs a single
/// detection on this._raster and caches the transform of the most confident
/// marker into this._trans_mat.
/// NOTE(review): this method mixes one-time initialization with per-frame
/// detection work; callers appear to rely on both happening here.
/// </summary>
/// <param name="topLevelForm">Owning control (not referenced in the body).</param>
public void AR_Initialization(Control topLevelForm)
{
    //Camera parameters
    NyARParam ap = NyARParam.createFromARParamFile(new StreamReader(AR_CAMERA_FILE));
    ap.changeScreenSize(SCREEN_WIDTH, SCREEN_HEIGHT);
    //List of patterns
    ar_code = new NyARCode[Constante.PATT_MAX];
    //Pattern files
    AR_CODE_FILES = new String[Constante.PATT_MAX];
    //Pattern index list
    ar_code_index = new int[Constante.PATT_MAX];
    AR_CODE_FILES[0] = "data/patt_U.dat";
    AR_CODE_FILES[1] = "data/patt_S.dat";
    AR_CODE_FILES[2] = "data/patt_T.dat";
    AR_CODE_FILES[3] = "data/patt_H.dat";
    AR_CODE_FILES[4] = "data/patt_B.dat";
    // Load each pattern file as a 16x16 NyARCode.
    for (int count = 0; count < Constante.PATT_MAX; count++)
    {
        ar_code[count] = NyARCode.createFromARPattFile(new StreamReader(AR_CODE_FILES[count]), 16, 16);
    }
    // All markers share the same width (80 -- presumably millimeters; TODO confirm).
    marker_width = new double[Constante.PATT_MAX];
    for (int count = 0; count < Constante.PATT_MAX; count++)
    {
        marker_width[count] = 80.0f;
    }
    this._ar = new NyARDetectMarker(ap, ar_code, marker_width, Constante.PATT_MAX);
    this._ar.setContinueMode(true);
    // Camera projection (right-handed frustum, near=1, far=2000).
    Matrix tmp = new Matrix();
    NyARD3dUtil.toCameraFrustumRH(ap, 1, 2000, ref tmp);
    m_Device.Transform.Projection = tmp;
    // View Matrix:
    Vector3 camera_position = new Vector3(0, 0, -500);
    camera_position.Normalize();
    //camera_position.Multiply(m_Range);
    m_Device.Transform.View = Matrix.LookAtLH(
        camera_position,
        new Vector3(0.0f, 0.0f, 0.0f),
        new Vector3(0.0f, 1.0f, 0.0f));
    //m_Device.Transform.Projection = Matrix.PerspectiveFovLH((float)Math.PI / 4, SCREEN_WIDTH / SCREEN_HEIGHT, 1, 2000);
    // Full-screen viewport.
    Viewport vp = new Viewport();
    vp.X = 0;
    vp.Y = 0;
    //vp.Height = ap.getScreenSize().h;
    //vp.Width = ap.getScreenSize().w;
    vp.Height = SCREEN_HEIGHT;
    vp.Width = SCREEN_WIDTH;
    vp.MaxZ = 1.0f;
    m_Device.Viewport = vp;
    this._surface = new NyARD3dSurface(m_Device, SCREEN_WIDTH, SCREEN_HEIGHT);
    // --- detection pass on the current raster ---
    NyARDoubleMatrix44 nyar_transmat = this.__OnBuffer_nyar_transmat;
    int nb_marker_detected = 0;
    nb_marker_detected = this._ar.detectMarkerLite(this._raster, Constante.binarisation);
    if (nb_marker_detected > 0)
    {
        bestConfidence = 0;
        bestMarker = 0;
        //if the number of detected markers is bigger than the max number available we set it back to MAX number
        if (nb_marker_detected > Constante.MARK_MAX)
        {
            nb_marker_detected = Constante.MARK_MAX;
        }
        //get the best confidence from the detected markers
        for (int count = 0; count < nb_marker_detected; count++)
        {
            ar_code_index[count] = this._ar.getARCodeIndex(count);
            if (this._ar.getConfidence(count) > bestConfidence)
            {
                bestConfidence = this._ar.getConfidence(count);
                bestMarker = count;
            }
            //textBox1.Text += "bestConfidence: " + bestConfidence + " bestMarker: " + bestMarker+"\n";
        }
        //textBox1.Text += "finally:\nbestConfidence: " + bestConfidence + " bestMarker: " + bestMarker+"\n\n";
        try
        {
            //MessageBox.Show("bestMarker: " + bestMarker, "RenderForm.AR_Initialization");
            this._ar.getTransmationMatrix(bestMarker, nyar_transmat);
        }
        catch (Exception x)
        {
            // NOTE(review): exception silently swallowed -- the previous
            // transform is reused when the matrix lookup fails.
            //MessageBox.Show(x.ToString(), "RenderForm.AR_Initialize");
        }
        NyARD3dUtil.toD3dCameraView(nyar_transmat, 1f, ref this._trans_mat);
    }
    this._nb_marker_detected = nb_marker_detected;
    {
        try
        {
            this._surface.setRaster(this._raster);
        }
        catch (Exception x)
        {
            // NOTE(review): best-effort surface update; failures are ignored.
            //MessageBox.Show(x.ToString(), "RenderForm.AR_Initialization");
        }
    }
}
/// <summary>
/// Builds a marker system configuration from an ARToolkit camera parameter
/// stream, then rescales the loaded parameters to the given screen size.
/// </summary>
/// <param name="i_ar_param_stream">Reader over an ARToolkit camera parameter file; consumed by the delegated constructor.</param>
/// <param name="i_width">Target screen width in pixels.</param>
/// <param name="i_height">Target screen height in pixels.</param>
public NyARMarkerSystemConfig(StreamReader i_ar_param_stream, int i_width, int i_height)
    : this(NyARParam.createFromARParamFile(i_ar_param_stream))
{
    this._param.changeScreenSize(i_width, i_height);
}
/// <summary>
/// Initializes the application: sizes the form, prepares the capture device,
/// best-effort fetches an initial frame from the remote service, loads camera
/// parameters and the marker pattern, and configures the Direct3D device
/// (projection, view, viewport, cube, background surface).
/// </summary>
/// <param name="topLevelForm">Form to size and attach the D3D device to.</param>
/// <param name="i_cap_device">Capture device to configure and use.</param>
/// <returns>Always true; D3D/capture failures surface as exceptions.</returns>
public bool InitializeApplication(Form1 topLevelForm, CaptureDevice i_cap_device)
{
    topLevelForm.ClientSize = new Size(SCREEN_WIDTH, SCREEN_HEIGHT);
    i_cap_device.SetCaptureListener(this);
    i_cap_device.PrepareCapture(SCREEN_WIDTH, SCREEN_HEIGHT, 30);
    this._cap = i_cap_device;
    #region my code
    // Best-effort: seed the raster from an image fetched via the service;
    // if the fetch or decode fails, the raster stays unset.
    try
    {
        byte[] bimg = service.getb();
        Image img = byteToImage(bimg);
        if (img != null)
        {
            this._raster = new NyARBitmapRaster((Bitmap)img);
        }
    }
    catch (Exception)
    {
        // NOTE(review): all exceptions are swallowed here, hiding service
        // failures; kept as best-effort to preserve behavior, but consider logging.
    }
    #endregion

    // Load and configure the AR camera parameter file. Readers are disposed
    // once consumed (they were previously leaked).
    NyARParam ap;
    using (StreamReader param_reader = new StreamReader(AR_CAMERA_FILE))
    {
        ap = NyARParam.createFromARParamFile(param_reader);
    }
    ap.changeScreenSize(SCREEN_WIDTH, SCREEN_HEIGHT);

    // Load the 16x16 AR pattern code.
    NyARCode code;
    using (StreamReader code_reader = new StreamReader(AR_CODE_FILE))
    {
        code = NyARCode.createFromARPattFile(code_reader, 16, 16);
    }

    // Single-marker detector in continuous mode.
    this._ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0, NyARSingleDetectMarker.PF_NYARTOOLKIT);
    this._ar.setContinueMode(true);

    // Prepare the 3D device.
    this._device = PrepareD3dDevice(topLevelForm);
    this._device.RenderState.ZBufferEnable = true;
    this._device.RenderState.Lighting = false;

    // Camera projection from the right-handed frustum (near=10, far=10000).
    Matrix tmp = new Matrix();
    NyARD3dUtil.toCameraFrustumRH(ap.getPerspectiveProjectionMatrix(), ap.getScreenSize(), 1, 10, 10000, ref tmp);
    this._device.Transform.Projection = tmp;

    // View transform: eye at the origin looking down +Z, Y axis up.
    this._device.Transform.View = Matrix.LookAtLH(
        new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 1.0f), new Vector3(0.0f, 1.0f, 0.0f));

    // Viewport covering the configured screen size.
    Viewport vp = new Viewport();
    vp.X = 0;
    vp.Y = 0;
    vp.Height = ap.getScreenSize().h;
    vp.Width = ap.getScreenSize().w;
    vp.MaxZ = 1.0f;
    this._device.Viewport = vp;

    // Color cube (edge length 40) and background surface.
    this._cube = new ColorCube(this._device, 40);
    this._surface = new NyARD3dSurface(this._device, SCREEN_WIDTH, SCREEN_HEIGHT);
    this._is_marker_enable = false;
    return true;
}