/**
 * Binarizes an RGB raster into the output binary raster using the
 * configured threshold implementation.
 * @param i_input source RGB raster; must be the same size as i_output.
 * @param i_output destination binary raster.
 */
public void doFilter(INyARRgbRaster i_input, NyARBinRaster i_output)
{
    // Contract: both rasters share the same dimensions.
    Debug.Assert(i_input.getSize().isEqualSize(i_output.getSize()) == true);
    NyARIntSize filter_area = i_output.getSize();
    this._do_threshold_impl.doThFilter(i_input, i_output, filter_area);
}
/**
 * Converts an RGB source raster into a half-resolution binary raster,
 * thresholding while down-sampling. The input must be exactly twice the
 * output size in both dimensions.
 * @param i_input source RGB raster; buffer type must pass checkInputType().
 * @param i_output destination binary raster (INT2D_BIN_8).
 * @throws NyARException when the input pixel format is unsupported.
 */
public void doFilter(INyARRgbRaster i_input, NyARBinRaster i_output)
{
    int in_buf_type = i_input.getBufferType();
    NyARIntSize size = i_output.getSize();
    // Contract checks: output format, supported input format, 2:1 size ratio.
    Debug.Assert(i_output.isEqualBufferType(NyARBufferType.INT2D_BIN_8));
    Debug.Assert(checkInputType(in_buf_type) == true);
    Debug.Assert(i_input.getSize().isEqualSize(size.w * 2, size.h * 2) == true);
    int[][] out_buf = (int[][])i_output.getBuffer();
    // Dispatch on the pixel layout of the source buffer.
    switch (in_buf_type) {
        case NyARBufferType.BYTE1D_B8G8R8_24:
        case NyARBufferType.BYTE1D_R8G8B8_24:
            convert24BitRgb((byte[])i_input.getBuffer(), out_buf, size);
            break;
        case NyARBufferType.BYTE1D_B8G8R8X8_32:
            convert32BitRgbx((byte[])i_input.getBuffer(), out_buf, size);
            break;
        case NyARBufferType.WORD1D_R5G6B5_16LE:
            convert16BitRgb565word((short[])i_input.getBuffer(), out_buf, size);
            break;
        default:
            // Pixel format not handled by any converter.
            throw new NyARException();
    }
}
/**
 * Thresholds a grayscale raster into a binary raster.
 * A pixel whose gray value is less than or equal to the threshold becomes 0;
 * otherwise it becomes 1.
 * @param i_input source grayscale raster (INT1D_GRAY_8).
 * @param i_output destination binary raster (INT1D_BIN_8); same size assumed.
 */
public void doFilter(NyARGrayscaleRaster i_input, NyARBinRaster i_output)
{
    Debug.Assert(i_input.getBufferType() == NyARBufferType.INT1D_GRAY_8);
    Debug.Assert(i_output.getBufferType() == NyARBufferType.INT1D_BIN_8);
    int[] dst = (int[])i_output.getBuffer();
    int[] src = (int[])i_input.getBuffer();
    NyARIntSize size = i_input.getSize();
    int th = this._threshold;
    // Walk every pixel from the end of the buffer down to index 0.
    // (The original manually unrolled this loop 8x; the output is identical.)
    for (int idx = size.w * size.h - 1; idx >= 0; idx--) {
        dst[idx] = (src[idx] & 0xff) <= th ? 0 : 1;
    }
}
/**
 * Binarizes a grayscale raster with a fixed threshold.
 * Pixels at or below the threshold map to 0, brighter pixels map to 1.
 * @param i_input grayscale source (INT1D_GRAY_8).
 * @param i_output binary destination (INT1D_BIN_8); same size assumed.
 */
public void doFilter(NyARGrayscaleRaster i_input, NyARBinRaster i_output)
{
    Debug.Assert(i_input.getBufferType() == NyARBufferType.INT1D_GRAY_8);
    Debug.Assert(i_output.getBufferType() == NyARBufferType.INT1D_BIN_8);
    int[] out_pixels = (int[])i_output.getBuffer();
    int[] in_pixels = (int[])i_input.getBuffer();
    NyARIntSize raster_size = i_input.getSize();
    int threshold = this._threshold;
    int total = raster_size.h * raster_size.w;
    // Forward scan over the whole buffer; produces the same result as the
    // original 8x-unrolled backward scan.
    for (int p = 0; p < total; p++) {
        out_pixels[p] = (in_pixels[p] & 0xff) <= threshold ? 0 : 1;
    }
}
/**
 * One-time initialization: builds the detection pipeline objects
 * (contour detector, transform-matrix solver, binarization filter,
 * binary work raster, threshold analyzer, square callback, offset).
 * @param i_param camera/screen parameters.
 * @param i_raster_type pixel format of the rasters this detector will process.
 */
protected void initInstance(NyARParam i_param, int i_raster_type)
{
    // Guard against double initialization.
    Debug.Assert(this._initialized == false);
    NyARIntSize screen = i_param.getScreenSize();
    // Analysis objects.
    this._square_detect = new NyARSquareContourDetector_Rle(screen);
    this._transmat = new NyARTransMat(i_param);
    this._tobin_filter = new NyARRasterFilter_ARToolkitThreshold(110, i_raster_type);
    // Working binary raster, sized to the screen.
    this._bin_raster = new NyARBinRaster(screen.w, screen.h);
    this._threshold_detect = new NyARRasterThresholdAnalyzer_SlidePTile(15, i_raster_type, 4);
    this._initialized = true;
    // Callback handler for detected squares.
    this._detectmarker_cb = new DetectSquareCB(i_param);
    this._offset = new NyARRectOffset();
}
/**
 * Builds a single-marker detector for one reference marker code.
 * @param i_ref_param camera/screen parameters (held by reference).
 * @param i_ref_code marker pattern code to match against.
 * @param i_marker_width physical width of the marker square.
 */
protected NyARSingleDetectMarker(NyARParam i_ref_param, NyARCode i_ref_code, double i_marker_width)
{
    // Pattern-matching machinery sized to the reference code.
    this._deviation_data = new NyARMatchPattDeviationColorData(i_ref_code.getWidth(), i_ref_code.getHeight());
    this._match_patt = new NyARMatchPatt_Color_WITHOUT_PCA(i_ref_code);
    this._offset = new NyARRectOffset();
    this._offset.setSquare(i_marker_width);
    this._coordline = new NyARCoord2Linear(i_ref_param.getScreenSize(), i_ref_param.getDistortionFactor());
    // Working binary raster at screen resolution.
    NyARIntSize screen_size = i_ref_param.getScreenSize();
    this._bin_raster = new NyARBinRaster(screen_size.w, screen_size.h);
}
/**
 * Replacement for the ARToolKit function
 * ARMarkerInfo2 *arDetectMarker2(ARInt16 *limage, int label_num, int *label_ref,
 * int *warea, double *wpos, int *wclip, int area_max, int area_min,
 * double *factor, int *marker_num).
 * Builds a list of square (marker) candidates from the labeling result of the
 * given binary raster and stores them into o_square_stack, excluding candidates
 * that overlap already-accepted squares.
 * @param i_raster
 *            binary raster image to analyze.
 * @param o_square_stack
 *            stack receiving the extracted square candidates.
 * @throws NyARException
 */
public void detectMarker(NyARBinRaster i_raster, NyARSquareStack o_square_stack)
{
    INyARLabeling labeling_proc = this._labeling;
    NyARLabelingImage limage = this._limage;
    // Reset the output holder before filling it.
    o_square_stack.clear();
    // Run labeling on the binary raster.
    labeling_proc.labeling(i_raster);
    // Nothing to do when no labels were found.
    int label_num = limage.getLabelStack().getLength();
    if (label_num < 1) {
        return;
    }
    NyARLabelingLabelStack stack = limage.getLabelStack();
    NyARLabelingLabel[] labels = stack.getArray();
    // Sort labels by area, largest first.
    stack.sortByArea();
    // Skip labels that are larger than the inspection range.
    int i;
    for (i = 0; i < label_num; i++) {
        // Ignore until the label area drops into the inspectable range.
        if (labels[i].area <= AR_AREA_MAX) {
            break;
        }
    }
    int xsize = this._width;
    int ysize = this._height;
    int[] xcoord = this._xcoord;
    int[] ycoord = this._ycoord;
    int coord_max = this._max_coord;
    int[] mkvertex = this.__detectMarker_mkvertex;
    OverlapChecker overlap = this._overlap_checker;
    int coord_num;
    int label_area;
    NyARLabelingLabel label_pt;
    // Size the overlap checker for the number of labels in this frame.
    overlap.reset(label_num);
    int vertex1;
    for (; i < label_num; i++) {
        label_pt = labels[i];
        label_area = label_pt.area;
        // Labels are sorted by area, so once one is too small, all the rest are.
        if (label_area < AR_AREA_MIN) {
            break;
        }
        // Exclude labels whose clip region touches the screen border.
        if (label_pt.clip_l == 1 || label_pt.clip_r == xsize - 2) {// if(wclip[i*4+0] == 1 || wclip[i*4+1] ==xsize-2){
            continue;
        }
        if (label_pt.clip_t == 1 || label_pt.clip_b == ysize - 2) {// if( wclip[i*4+2] == 1 || wclip[i*4+3] ==ysize-2){
            continue;
        }
        // Skip labels overlapping an already-detected square.
        if (!overlap.check(label_pt)) {
            continue;
        }
        // Trace the contour of this label.
        coord_num = limage.getContour(i, coord_max, xcoord, ycoord);
        if (coord_num == coord_max) {
            // Contour too long for the coordinate buffers; skip.
            continue;
        }
        // Find the index of the first vertex candidate.
        vertex1 = scanVertex(xcoord, ycoord, coord_num);
        // Rotate the contour so that vertex1 becomes index 0.
        normalizeCoord(xcoord, ycoord, vertex1, coord_num);
        // Reserve an output slot up front; it is popped again on failure below.
        NyARSquare square_ptr = o_square_stack.prePush();
        // Extract the four vertex indexes.
        if (!getSquareVertex(xcoord, ycoord, vertex1, coord_num, label_area, mkvertex)) {
            o_square_stack.pop();// vertex extraction failed; discard the slot
            continue;
        }
        if (!getSquareLine(mkvertex, xcoord, ycoord, square_ptr)) {
            // The four edges did not form a valid square; discard the slot.
            o_square_stack.pop();
            continue;
        }
        // Register the accepted label with the overlap checker.
        overlap.push(label_pt);
    }
    return;
}
/**
 * Traces the contour starting at the given entry pixel of a binary raster.
 * @param i_raster binary raster (INT1D_BIN_8).
 * @param i_entry_x x of the contour entry point.
 * @param i_entry_y y of the contour entry point.
 * @param i_array_size capacity of the coordinate output arrays.
 * @param o_coord_x receives contour x coordinates.
 * @param o_coord_y receives contour y coordinates.
 * @return the number of contour points written.
 */
public int getContour(NyARBinRaster i_raster, int i_entry_x, int i_entry_y, int i_array_size, int[] o_coord_x, int[] o_coord_y)
{
    Debug.Assert(i_raster.isEqualBufferType(NyARBufferType.INT1D_BIN_8));
    // Delegate to the shared implementation with threshold index 0.
    int point_count = impl_getContour(i_raster, 0, i_entry_x, i_entry_y, i_array_size, o_coord_x, o_coord_y);
    return point_count;
}
/**
 * Detects square markers in the given binary raster.
 * Results are delivered through the implementing subclass's own mechanism;
 * there is no output parameter on this overload.
 * @param i_raster
 *            binary raster image to analyze.
 * @throws NyARException
 */
public abstract void detectMarker(NyARBinRaster i_raster);
/**
 * Detects square markers in the given binary raster, reporting each
 * detected square through the supplied callback.
 * @param i_raster
 *            binary raster image to analyze.
 * @param i_callback
 *            callback invoked once per detected square.
 * @throws NyARException
 */
public abstract void detectMarkerCB(NyARBinRaster i_raster, IDetectMarkerCallback i_callback);
/**
 * Creates an instance with the given size parameters and buffer-allocation mode.
 * The buffer format is {@link NyARBufferType#INT1D_GRAY_8}.
 * @param i_width
 *            raster width.
 * @param i_height
 *            raster height.
 * @param i_is_alloc
 *            buffer-ownership flag: true allocates an internal buffer,
 *            false uses an external buffer. With false the initial buffer is
 *            null; attach one after construction via {@link #wrapBuffer}.
 * @throws NyARRuntimeException
 */
public static INyARBinRaster createInstance(int i_width, int i_height, bool i_is_alloc)
{
    return(NyARBinRaster.createInstance(i_width, i_height, NyARBufferType.INT1D_GRAY_8, i_is_alloc));
}
/**
 * Creates an instance with an internally allocated buffer in
 * {@link NyARBufferType#INT1D_GRAY_8} format.
 * @param i_width
 *            raster width.
 * @param i_height
 *            raster height.
 * @throws NyARRuntimeException
 */
public static INyARBinRaster createInstance(int i_width, int i_height)
{
    return(NyARBinRaster.createInstance(i_width, i_height, true));
}
// TODO(original author): maybe also provide a slower path that converts
// arbitrary rasters to BIN-RLE while labeling.
/**
 * Runs RLE labeling over a row band of a binary raster.
 * @param i_bin_raster
 *            binary raster (INT1D_BIN_8) to label.
 * @param i_top
 *            top row of the band to process.
 * @param i_bottom
 *            bottom row of the band to process.
 * @param o_stack
 *            stack receiving the label fragment information.
 * @return number of labels found (as reported by imple_labeling).
 * @throws NyARException
 */
public int labeling(NyARBinRaster i_bin_raster, int i_top, int i_bottom, RleLabelFragmentInfoStack o_stack)
{
    Debug.Assert(i_bin_raster.isEqualBufferType(NyARBufferType.INT1D_BIN_8));
    return(this.imple_labeling(i_bin_raster, 0, i_top, i_bottom, o_stack));
}
/** * この関数は、ラスタをラべリングします。 * 結果は、o_destinationに出力します。 * <p>メモ - * この関数の元になるARToolKitの関数は、static ARInt16 *labeling2( ARUint8 *image, int thresh,int *label_num, int **area, double **pos, int **clip,int **label_ref, int LorR )です。 * </p> * @param i_raster * 入力元の二値ラスタオブジェクトです。画素形式は、{@link NyARBufferType#INT1D_BIN_8}である必要があります。 * @param o_destination * ラべリング画像の出力先オブジェクトです。i_rasterと同じサイズである必要があります。 * @ */ public int labeling(NyARBinRaster i_raster, NyARLabelingImage o_destination) { Debug.Assert(i_raster.getBufferType() == NyARBufferType.INT1D_BIN_8); int label_img_ptr1, label_pixel; int i, j; int n, k; /* work */ // サイズチェック NyARIntSize in_size = i_raster.getSize(); Debug.Assert(o_destination.getSize().isEqualSize(in_size)); int lxsize = in_size.w;// lxsize = arUtil_c.arImXsize; int lysize = in_size.h;// lysize = arUtil_c.arImYsize; int[] label_img = (int[])o_destination.getBuffer(); // 枠作成はインスタンスを作った直後にやってしまう。 // ラベリング情報のリセット(ラベリングインデックスを使用) o_destination.reset(true); int[] label_idxtbl = o_destination.getIndexArray(); int[] raster_buf = (int[])i_raster.getBuffer(); int[] work2_pt; int wk_max = 0; int pixel_index; int[][] work2 = this._work_holder.work2; // [1,1](ptr0)と、[0,1](ptr1)のインデクス値を計算する。 for (j = 1; j < lysize - 1; j++) {// for (int j = 1; j < lysize - 1;j++, pnt += poff*2, pnt2 += 2) { pixel_index = j * lxsize + 1; label_img_ptr1 = pixel_index - lxsize;// label_img_pt1 = label_img[j - 1]; for (i = 1; i < lxsize - 1; i++, pixel_index++, label_img_ptr1++) {// for(int i = 1; i < lxsize-1;i++, pnt+=poff, pnt2++) { // RGBの合計値が閾値より小さいかな? 
if (raster_buf[pixel_index] != 0) { label_img[pixel_index] = 0;// label_img_pt0[i] = 0;// *pnt2 = 0; } else { // pnt1 = ShortPointer.wrap(pnt2, -lxsize);//pnt1 =&(pnt2[-lxsize]); if (label_img[label_img_ptr1] > 0) {// if (label_img_pt1[i] > 0) {// if( *pnt1 > 0 ) { label_pixel = label_img[label_img_ptr1];// label_pixel = label_img_pt1[i];// *pnt2 = *pnt1; work2_pt = work2[label_pixel - 1]; work2_pt[0]++;// work2[((*pnt2)-1)*7+0] ++; work2_pt[1] += i;// work2[((*pnt2)-1)*7+1] += i; work2_pt[2] += j;// work2[((*pnt2)-1)*7+2] += j; work2_pt[6] = j;// work2[((*pnt2)-1)*7+6] = j; } else if (label_img[label_img_ptr1 + 1] > 0) {// } else if (label_img_pt1[i + 1] > 0) {// }else if(*(pnt1+1) > 0 ) { if (label_img[label_img_ptr1 - 1] > 0) {// if (label_img_pt1[i - 1] > 0) {// if( *(pnt1-1) > 0 ) { label_pixel = label_idxtbl[label_img[label_img_ptr1 + 1] - 1];// m = label_idxtbl[label_img_pt1[i + 1] - 1];// m // =work[*(pnt1+1)-1]; n = label_idxtbl[label_img[label_img_ptr1 - 1] - 1];// n = label_idxtbl[label_img_pt1[i - 1] - 1];// n =work[*(pnt1-1)-1]; if (label_pixel > n) { // wk=IntPointer.wrap(work, 0);//wk = &(work[0]); for (k = 0; k < wk_max; k++) { if (label_idxtbl[k] == label_pixel) {// if( *wk == m ) label_idxtbl[k] = n;// *wk = n; } } label_pixel = n;// *pnt2 = n; } else if (label_pixel < n) { // wk=IntPointer.wrap(work,0);//wk = &(work[0]); for (k = 0; k < wk_max; k++) { if (label_idxtbl[k] == n) {// if( *wk == n ){ label_idxtbl[k] = label_pixel;// *wk = m; } } } work2_pt = work2[label_pixel - 1]; work2_pt[0]++; work2_pt[1] += i; work2_pt[2] += j; work2_pt[6] = j; } else if ((label_img[pixel_index - 1]) > 0) {// } else if ((label_img_pt0[i - 1]) > 0) {// }else if(*(pnt2-1) > 0) { label_pixel = label_idxtbl[label_img[label_img_ptr1 + 1] - 1];// m = label_idxtbl[label_img_pt1[i + 1] - 1];// m =work[*(pnt1+1)-1]; n = label_idxtbl[label_img[pixel_index - 1] - 1];// n = label_idxtbl[label_img_pt0[i - 1] - 1];// n =work[*(pnt2-1)-1]; if (label_pixel > n) { for (k = 0; k < 
wk_max; k++) { if (label_idxtbl[k] == label_pixel) {// if( *wk == m ){ label_idxtbl[k] = n;// *wk = n; } } label_pixel = n;// *pnt2 = n; } else if (label_pixel < n) { for (k = 0; k < wk_max; k++) { if (label_idxtbl[k] == n) {// if( *wk == n ){ label_idxtbl[k] = label_pixel;// *wk = m; } } } work2_pt = work2[label_pixel - 1]; work2_pt[0]++;// work2[((*pnt2)-1)*7+0] ++; work2_pt[1] += i;// work2[((*pnt2)-1)*7+1] += i; work2_pt[2] += j;// work2[((*pnt2)-1)*7+2] += j; } else { label_pixel = label_img[label_img_ptr1 + 1];// label_pixel = label_img_pt1[i + 1];// *pnt2 = // *(pnt1+1); work2_pt = work2[label_pixel - 1]; work2_pt[0]++;// work2[((*pnt2)-1)*7+0] ++; work2_pt[1] += i;// work2[((*pnt2)-1)*7+1] += i; work2_pt[2] += j;// work2[((*pnt2)-1)*7+2] += j; if (work2_pt[3] > i) {// if(work2[((*pnt2)-1)*7+3] > i ){ work2_pt[3] = i;// work2[((*pnt2)-1)*7+3] = i; } work2_pt[6] = j;// work2[((*pnt2)-1)*7+6] = j; } } else if ((label_img[label_img_ptr1 - 1]) > 0) {// } else if ((label_img_pt1[i - 1]) > 0) {// }else if( // *(pnt1-1) > 0 ) { label_pixel = label_img[label_img_ptr1 - 1];// label_pixel = label_img_pt1[i - 1];// *pnt2 = // *(pnt1-1); work2_pt = work2[label_pixel - 1]; work2_pt[0]++;// work2[((*pnt2)-1)*7+0] ++; work2_pt[1] += i;// work2[((*pnt2)-1)*7+1] += i; work2_pt[2] += j;// work2[((*pnt2)-1)*7+2] += j; if (work2_pt[4] < i) {// if( work2[((*pnt2)-1)*7+4] <i ){ work2_pt[4] = i;// work2[((*pnt2)-1)*7+4] = i; } work2_pt[6] = j;// work2[((*pnt2)-1)*7+6] = j; } else if (label_img[pixel_index - 1] > 0) {// } else if (label_img_pt0[i - 1] > 0) {// }else if(*(pnt2-1) > 0) { label_pixel = label_img[pixel_index - 1];// label_pixel = label_img_pt0[i - 1];// *pnt2 =*(pnt2-1); work2_pt = work2[label_pixel - 1]; work2_pt[0]++;// work2[((*pnt2)-1)*7+0] ++; work2_pt[1] += i;// work2[((*pnt2)-1)*7+1] += i; work2_pt[2] += j;// work2[((*pnt2)-1)*7+2] += j; if (work2_pt[4] < i) {// if( work2[((*pnt2)-1)*7+4] <i ){ work2_pt[4] = i;// work2[((*pnt2)-1)*7+4] = i; } } else { // 
現在地までの領域を予約 this._work_holder.reserv(wk_max); wk_max++; label_idxtbl[wk_max - 1] = wk_max; label_pixel = wk_max;// work[wk_max-1] = *pnt2 = wk_max; work2_pt = work2[wk_max - 1]; work2_pt[0] = 1; work2_pt[1] = i; work2_pt[2] = j; work2_pt[3] = i; work2_pt[4] = i; work2_pt[5] = j; work2_pt[6] = j; } label_img[pixel_index] = label_pixel;// label_img_pt0[i] = label_pixel; } } } // インデックステーブルとラベル数の計算 int wlabel_num = 1;// *label_num = *wlabel_num = j - 1; for (i = 0; i < wk_max; i++) {// for(int i = 1; i <= wk_max; i++,wk++) { label_idxtbl[i] = (label_idxtbl[i] == i + 1) ? wlabel_num++ : label_idxtbl[label_idxtbl[i] - 1];// *wk=(*wk==i)?j++:work[(*wk)-1]; } wlabel_num -= 1;// *label_num = *wlabel_num = j - 1; if (wlabel_num == 0) {// if( *label_num == 0 ) { // 発見数0 o_destination.getLabelStack().clear(); return 0; } // ラベル情報の保存等 NyARLabelingLabelStack label_list = o_destination.getLabelStack(); // ラベルバッファを予約 label_list.init(wlabel_num); // エリアと重心、クリップ領域を計算 NyARLabelingLabel label_pt; NyARLabelingLabel[] labels = label_list.getArray(); for (i = 0; i < wlabel_num; i++) { label_pt = labels[i]; label_pt.id = (short)(i + 1); label_pt.area = 0; label_pt.pos_x = label_pt.pos_y = 0; label_pt.clip_l = lxsize;// wclip[i*4+0] = lxsize; label_pt.clip_t = lysize;// wclip[i*4+2] = lysize; label_pt.clip_r = label_pt.clip_b = 0;// wclip[i*4+3] = 0; } for (i = 0; i < wk_max; i++) { label_pt = labels[label_idxtbl[i] - 1]; work2_pt = work2[i]; label_pt.area += work2_pt[0]; label_pt.pos_x += work2_pt[1]; label_pt.pos_y += work2_pt[2]; if (label_pt.clip_l > work2_pt[3]) { label_pt.clip_l = work2_pt[3]; } if (label_pt.clip_r < work2_pt[4]) { label_pt.clip_r = work2_pt[4]; } if (label_pt.clip_t > work2_pt[5]) { label_pt.clip_t = work2_pt[5]; } if (label_pt.clip_b < work2_pt[6]) { label_pt.clip_b = work2_pt[6]; } } for (i = 0; i < wlabel_num; i++) {// for(int i = 0; i < *label_num; i++ ) { label_pt = labels[i]; label_pt.pos_x /= label_pt.area; label_pt.pos_y /= label_pt.area; } return 
wlabel_num; }
/**
 * Labels the given rectangular region of a binary raster.
 * @param i_bin_raster binary raster (INT1D_BIN_8) to label.
 * @param i_area rectangular region of the raster to process.
 */
public virtual void labeling(NyARBinRaster i_bin_raster, NyARIntRect i_area)
{
    Debug.Assert(i_bin_raster.isEqualBufferType(NyARBufferType.INT1D_BIN_8));
    // Forward the region as discrete arguments; threshold index is 0.
    int area_x = i_area.x;
    int area_y = i_area.y;
    this.imple_labeling(i_bin_raster, 0, area_x, area_y, i_area.w, i_area.h);
}
/** * この関数は、ラスタをラべリングします。 * 結果は、o_destinationに出力します。 * <p>メモ - * この関数の元になるARToolKitの関数は、static ARInt16 *labeling2( ARUint8 *image, int thresh,int *label_num, int **area, double **pos, int **clip,int **label_ref, int LorR )です。 * </p> * @param i_raster * 入力元の二値ラスタオブジェクトです。画素形式は、{@link NyARBufferType#INT1D_BIN_8}である必要があります。 * @param o_destination * ラべリング画像の出力先オブジェクトです。i_rasterと同じサイズである必要があります。 * @ */ public int labeling(NyARBinRaster i_raster, NyARLabelingImage o_destination) { Debug.Assert(i_raster.getBufferType() == NyARBufferType.INT1D_BIN_8); int label_img_ptr1, label_pixel; int i, j; int n, k; /* work */ // サイズチェック NyARIntSize in_size = i_raster.getSize(); Debug.Assert(o_destination.getSize().isEqualSize(in_size)); int lxsize = in_size.w; // lxsize = arUtil_c.arImXsize; int lysize = in_size.h; // lysize = arUtil_c.arImYsize; int[] label_img = (int[])o_destination.getBuffer(); // 枠作成はインスタンスを作った直後にやってしまう。 // ラベリング情報のリセット(ラベリングインデックスを使用) o_destination.reset(true); int[] label_idxtbl = o_destination.getIndexArray(); int[] raster_buf = (int[])i_raster.getBuffer(); int[] work2_pt; int wk_max = 0; int pixel_index; int[][] work2 = this._work_holder.work2; // [1,1](ptr0)と、[0,1](ptr1)のインデクス値を計算する。 for (j = 1; j < lysize - 1; j++) { // for (int j = 1; j < lysize - 1;j++, pnt += poff*2, pnt2 += 2) { pixel_index = j * lxsize + 1; label_img_ptr1 = pixel_index - lxsize; // label_img_pt1 = label_img[j - 1]; for (i = 1; i < lxsize - 1; i++, pixel_index++, label_img_ptr1++) { // for(int i = 1; i < lxsize-1;i++, pnt+=poff, pnt2++) { // RGBの合計値が閾値より小さいかな? 
if (raster_buf[pixel_index] != 0) { label_img[pixel_index] = 0;// label_img_pt0[i] = 0;// *pnt2 = 0; } else { // pnt1 = ShortPointer.wrap(pnt2, -lxsize);//pnt1 =&(pnt2[-lxsize]); if (label_img[label_img_ptr1] > 0) { // if (label_img_pt1[i] > 0) {// if( *pnt1 > 0 ) { label_pixel = label_img[label_img_ptr1]; // label_pixel = label_img_pt1[i];// *pnt2 = *pnt1; work2_pt = work2[label_pixel - 1]; work2_pt[0]++; // work2[((*pnt2)-1)*7+0] ++; work2_pt[1] += i; // work2[((*pnt2)-1)*7+1] += i; work2_pt[2] += j; // work2[((*pnt2)-1)*7+2] += j; work2_pt[6] = j; // work2[((*pnt2)-1)*7+6] = j; } else if (label_img[label_img_ptr1 + 1] > 0) { // } else if (label_img_pt1[i + 1] > 0) {// }else if(*(pnt1+1) > 0 ) { if (label_img[label_img_ptr1 - 1] > 0) { // if (label_img_pt1[i - 1] > 0) {// if( *(pnt1-1) > 0 ) { label_pixel = label_idxtbl[label_img[label_img_ptr1 + 1] - 1]; // m = label_idxtbl[label_img_pt1[i + 1] - 1];// m // =work[*(pnt1+1)-1]; n = label_idxtbl[label_img[label_img_ptr1 - 1] - 1]; // n = label_idxtbl[label_img_pt1[i - 1] - 1];// n =work[*(pnt1-1)-1]; if (label_pixel > n) { // wk=IntPointer.wrap(work, 0);//wk = &(work[0]); for (k = 0; k < wk_max; k++) { if (label_idxtbl[k] == label_pixel) { // if( *wk == m ) label_idxtbl[k] = n; // *wk = n; } } label_pixel = n;// *pnt2 = n; } else if (label_pixel < n) { // wk=IntPointer.wrap(work,0);//wk = &(work[0]); for (k = 0; k < wk_max; k++) { if (label_idxtbl[k] == n) { // if( *wk == n ){ label_idxtbl[k] = label_pixel; // *wk = m; } } } work2_pt = work2[label_pixel - 1]; work2_pt[0]++; work2_pt[1] += i; work2_pt[2] += j; work2_pt[6] = j; } else if ((label_img[pixel_index - 1]) > 0) { // } else if ((label_img_pt0[i - 1]) > 0) {// }else if(*(pnt2-1) > 0) { label_pixel = label_idxtbl[label_img[label_img_ptr1 + 1] - 1]; // m = label_idxtbl[label_img_pt1[i + 1] - 1];// m =work[*(pnt1+1)-1]; n = label_idxtbl[label_img[pixel_index - 1] - 1]; // n = label_idxtbl[label_img_pt0[i - 1] - 1];// n =work[*(pnt2-1)-1]; if (label_pixel > n) 
{ for (k = 0; k < wk_max; k++) { if (label_idxtbl[k] == label_pixel) { // if( *wk == m ){ label_idxtbl[k] = n; // *wk = n; } } label_pixel = n;// *pnt2 = n; } else if (label_pixel < n) { for (k = 0; k < wk_max; k++) { if (label_idxtbl[k] == n) { // if( *wk == n ){ label_idxtbl[k] = label_pixel; // *wk = m; } } } work2_pt = work2[label_pixel - 1]; work2_pt[0]++; // work2[((*pnt2)-1)*7+0] ++; work2_pt[1] += i; // work2[((*pnt2)-1)*7+1] += i; work2_pt[2] += j; // work2[((*pnt2)-1)*7+2] += j; } else { label_pixel = label_img[label_img_ptr1 + 1];// label_pixel = label_img_pt1[i + 1];// *pnt2 = // *(pnt1+1); work2_pt = work2[label_pixel - 1]; work2_pt[0]++; // work2[((*pnt2)-1)*7+0] ++; work2_pt[1] += i; // work2[((*pnt2)-1)*7+1] += i; work2_pt[2] += j; // work2[((*pnt2)-1)*7+2] += j; if (work2_pt[3] > i) { // if(work2[((*pnt2)-1)*7+3] > i ){ work2_pt[3] = i; // work2[((*pnt2)-1)*7+3] = i; } work2_pt[6] = j; // work2[((*pnt2)-1)*7+6] = j; } } else if ((label_img[label_img_ptr1 - 1]) > 0) { // } else if ((label_img_pt1[i - 1]) > 0) {// }else if( // *(pnt1-1) > 0 ) { label_pixel = label_img[label_img_ptr1 - 1]; // label_pixel = label_img_pt1[i - 1];// *pnt2 = // *(pnt1-1); work2_pt = work2[label_pixel - 1]; work2_pt[0]++; // work2[((*pnt2)-1)*7+0] ++; work2_pt[1] += i; // work2[((*pnt2)-1)*7+1] += i; work2_pt[2] += j; // work2[((*pnt2)-1)*7+2] += j; if (work2_pt[4] < i) { // if( work2[((*pnt2)-1)*7+4] <i ){ work2_pt[4] = i; // work2[((*pnt2)-1)*7+4] = i; } work2_pt[6] = j; // work2[((*pnt2)-1)*7+6] = j; } else if (label_img[pixel_index - 1] > 0) { // } else if (label_img_pt0[i - 1] > 0) {// }else if(*(pnt2-1) > 0) { label_pixel = label_img[pixel_index - 1]; // label_pixel = label_img_pt0[i - 1];// *pnt2 =*(pnt2-1); work2_pt = work2[label_pixel - 1]; work2_pt[0]++; // work2[((*pnt2)-1)*7+0] ++; work2_pt[1] += i; // work2[((*pnt2)-1)*7+1] += i; work2_pt[2] += j; // work2[((*pnt2)-1)*7+2] += j; if (work2_pt[4] < i) { // if( work2[((*pnt2)-1)*7+4] <i ){ work2_pt[4] = i; // 
work2[((*pnt2)-1)*7+4] = i; } } else { // 現在地までの領域を予約 this._work_holder.reserv(wk_max); wk_max++; label_idxtbl[wk_max - 1] = wk_max; label_pixel = wk_max;// work[wk_max-1] = *pnt2 = wk_max; work2_pt = work2[wk_max - 1]; work2_pt[0] = 1; work2_pt[1] = i; work2_pt[2] = j; work2_pt[3] = i; work2_pt[4] = i; work2_pt[5] = j; work2_pt[6] = j; } label_img[pixel_index] = label_pixel;// label_img_pt0[i] = label_pixel; } } } // インデックステーブルとラベル数の計算 int wlabel_num = 1;// *label_num = *wlabel_num = j - 1; for (i = 0; i < wk_max; i++) { // for(int i = 1; i <= wk_max; i++,wk++) { label_idxtbl[i] = (label_idxtbl[i] == i + 1) ? wlabel_num++ : label_idxtbl[label_idxtbl[i] - 1]; // *wk=(*wk==i)?j++:work[(*wk)-1]; } wlabel_num -= 1; // *label_num = *wlabel_num = j - 1; if (wlabel_num == 0) { // if( *label_num == 0 ) { // 発見数0 o_destination.getLabelStack().clear(); return(0); } // ラベル情報の保存等 NyARLabelingLabelStack label_list = o_destination.getLabelStack(); // ラベルバッファを予約 label_list.init(wlabel_num); // エリアと重心、クリップ領域を計算 NyARLabelingLabel label_pt; NyARLabelingLabel[] labels = label_list.getArray(); for (i = 0; i < wlabel_num; i++) { label_pt = labels[i]; label_pt.id = (short)(i + 1); label_pt.area = 0; label_pt.pos_x = label_pt.pos_y = 0; label_pt.clip_l = lxsize; // wclip[i*4+0] = lxsize; label_pt.clip_t = lysize; // wclip[i*4+2] = lysize; label_pt.clip_r = label_pt.clip_b = 0; // wclip[i*4+3] = 0; } for (i = 0; i < wk_max; i++) { label_pt = labels[label_idxtbl[i] - 1]; work2_pt = work2[i]; label_pt.area += work2_pt[0]; label_pt.pos_x += work2_pt[1]; label_pt.pos_y += work2_pt[2]; if (label_pt.clip_l > work2_pt[3]) { label_pt.clip_l = work2_pt[3]; } if (label_pt.clip_r < work2_pt[4]) { label_pt.clip_r = work2_pt[4]; } if (label_pt.clip_t > work2_pt[5]) { label_pt.clip_t = work2_pt[5]; } if (label_pt.clip_b < work2_pt[6]) { label_pt.clip_b = work2_pt[6]; } } for (i = 0; i < wlabel_num; i++) {// for(int i = 0; i < *label_num; i++ ) { label_pt = labels[i]; label_pt.pos_x /= label_pt.area; 
label_pt.pos_y /= label_pt.area; } return(wlabel_num); }
/**
 * Detects square candidates in the binary raster using RLE labeling and
 * reports each detected square through the callback.
 * @param i_raster binary raster image to analyze.
 * @param i_callback receives one onSquareDetect call per detected square.
 */
public override void detectMarkerCB(NyARBinRaster i_raster, IDetectMarkerCallback i_callback)
{
    RleLabelFragmentInfoStack flagment = this._stack;
    LabelOverlapChecker<RleLabelFragmentInfoStack.RleLabelFragmentInfo> overlap = this._overlap_checker;
    // Run labeling over the full raster height; stop when no labels found.
    int label_num = this._labeling.labeling(i_raster, 0, i_raster.getHeight(), flagment);
    if (label_num < 1) {
        return;
    }
    // Sort labels by area.
    flagment.sortByArea();
    // Fetch the label list.
    RleLabelFragmentInfoStack.RleLabelFragmentInfo[] labels = flagment.getArray();
    int xsize = this._width;
    int ysize = this._height;
    int[] xcoord = this._xcoord;
    int[] ycoord = this._ycoord;
    int coord_max = this._max_coord;
    int[] mkvertex = this.__detectMarker_mkvertex;
    // Size the overlap checker for this frame's label count.
    overlap.setMaxLabels(label_num);
    for (int i = 0; i < label_num; i++) {
        RleLabelFragmentInfoStack.RleLabelFragmentInfo label_pt = labels[i];
        int label_area = label_pt.area;
        // Exclude labels whose clip region touches the screen border.
        if (label_pt.clip_l == 0 || label_pt.clip_r == xsize - 1) {
            continue;
        }
        if (label_pt.clip_t == 0 || label_pt.clip_b == ysize - 1) {
            continue;
        }
        // Skip labels overlapping an already-detected square.
        if (!overlap.check(label_pt)) {
            continue;
        }
        // Trace the label's contour.
        int coord_num = _cpickup.getContour(i_raster, label_pt.entry_x, label_pt.clip_t, coord_max, xcoord, ycoord);
        if (coord_num == coord_max) {
            // Contour too long for the buffers; skip.
            continue;
        }
        // Check whether the contour forms a quadrilateral; on success the
        // four vertex indexes are written into mkvertex.
        if (!this._coord2vertex.getVertexIndexes(xcoord, ycoord, coord_num, label_area, mkvertex)) {
            // Could not extract the vertices.
            continue;
        }
        // Notify the caller that a square was found.
        i_callback.onSquareDetect(this, xcoord, ycoord, coord_num, mkvertex);
        // Register the accepted label with the overlap checker.
        overlap.push(label_pt);
    }
    return;
}
/**
 * Based on ARToolKit's arDetectMarker2.
 * Extracts every NyARSquare parameter except direction; direction is not
 * resolved here. Detected squares are reported via onSquareDetect().
 * @param i_raster
 *            binary raster image to analyze.
 * @throws NyARException
 */
public override void detectMarker(NyARBinRaster i_raster)
{
    NyARLabelingImage limage = this._limage;
    // Run labeling; stop when no labels were found.
    int label_num = this._labeling.labeling(i_raster, this._limage);
    if (label_num < 1) {
        return;
    }
    NyARLabelingLabelStack stack = limage.getLabelStack();
    // Sort labels by area, largest first.
    stack.sortByArea();
    //
    NyARLabelingLabel[] labels = stack.getArray();
    // Skip labels that are larger than the inspection range.
    int i;
    for (i = 0; i < label_num; i++) {
        // Ignore until the label area drops into the inspectable range.
        if (labels[i].area <= AR_AREA_MAX) {
            break;
        }
    }
    int xsize = this._width;
    int ysize = this._height;
    NyARIntCoordinates coord = this._coord;
    int[] mkvertex = this.__detectMarker_mkvertex;
    NyARLabelOverlapChecker<NyARLabelingLabel> overlap = this._overlap_checker;
    // Size the overlap checker for this frame's label count.
    overlap.setMaxLabels(label_num);
    for (; i < label_num; i++) {
        NyARLabelingLabel label_pt = labels[i];
        int label_area = label_pt.area;
        // Labels are sorted, so once one is too small, the rest are too.
        if (label_area < AR_AREA_MIN) {
            break;
        }
        // Exclude labels whose clip region touches the screen border.
        if (label_pt.clip_l == 1 || label_pt.clip_r == xsize - 2) // if(wclip[i*4+0] == 1 || wclip[i*4+1] ==xsize-2){
        {
            continue;
        }
        if (label_pt.clip_t == 1 || label_pt.clip_b == ysize - 2) // if( wclip[i*4+2] == 1 || wclip[i*4+3] ==ysize-2){
        {
            continue;
        }
        // Skip labels overlapping an already-detected square.
        if (!overlap.check(label_pt)) {
            continue;
        }
        // Trace the label's contour.
        if (!this._cpickup.getContour(limage, limage.getTopClipTangentX(label_pt), label_pt.clip_t, coord)) {
            continue;
        }
        // Check whether the contour forms a quadrilateral; on success the
        // four vertex indexes are written into mkvertex.
        if (!this._coord2vertex.getVertexIndexes(coord, label_area, mkvertex)) {
            // Could not extract the vertices.
            continue;
        }
        // Notify through the subclass hook that a square was found.
        this.onSquareDetect(coord, mkvertex);
        // Register the accepted label with the overlap checker.
        overlap.push(label_pt);
    }
    return;
}
/**
 * Detects square candidates in the binary raster using RLE labeling and
 * reports each one through the supplied callback.
 * @param i_raster binary raster image to analyze.
 * @param i_callback receives one onSquareDetect call per detected square.
 */
public override void detectMarkerCB(NyARBinRaster i_raster, IDetectMarkerCallback i_callback)
{
    RleLabelFragmentInfoStack flagment = this._stack;
    LabelOverlapChecker<RleLabelFragmentInfoStack.RleLabelFragmentInfo> overlap = this._overlap_checker;
    // Run labeling over the full raster height; stop when no labels found.
    int label_num = this._labeling.labeling(i_raster, 0, i_raster.getHeight(), flagment);
    if (label_num < 1) {
        return;
    }
    // Sort labels by area.
    flagment.sortByArea();
    // Fetch the label list.
    RleLabelFragmentInfoStack.RleLabelFragmentInfo[] labels = flagment.getArray();
    int xsize = this._width;
    int ysize = this._height;
    int[] xcoord = this._xcoord;
    int[] ycoord = this._ycoord;
    int coord_max = this._max_coord;
    int[] mkvertex = this.__detectMarker_mkvertex;
    // Size the overlap checker for this frame's label count.
    overlap.setMaxLabels(label_num);
    for (int i = 0; i < label_num; i++) {
        RleLabelFragmentInfoStack.RleLabelFragmentInfo label_pt = labels[i];
        int label_area = label_pt.area;
        // Exclude labels whose clip region touches the screen border.
        if (label_pt.clip_l == 0 || label_pt.clip_r == xsize - 1) {
            continue;
        }
        if (label_pt.clip_t == 0 || label_pt.clip_b == ysize - 1) {
            continue;
        }
        // Skip labels overlapping an already-detected square.
        if (!overlap.check(label_pt)) {
            continue;
        }
        // Trace the label's contour.
        int coord_num = _cpickup.getContour(i_raster, label_pt.entry_x, label_pt.clip_t, coord_max, xcoord, ycoord);
        if (coord_num == coord_max) {
            // Contour too long for the buffers; skip.
            continue;
        }
        // Check whether the contour forms a quadrilateral; on success the
        // four vertex indexes are written into mkvertex.
        if (!this._coord2vertex.getVertexIndexes(xcoord, ycoord, coord_num, label_area, mkvertex)) {
            // Could not extract the vertices.
            continue;
        }
        // Notify the caller that a square was found.
        i_callback.onSquareDetect(this, xcoord, ycoord, coord_num, mkvertex);
        // Register the accepted label with the overlap checker.
        overlap.push(label_pt);
    }
    return;
}
/**
 * Traces a contour inside the given clip rectangle of a binary raster.
 * @param i_raster binary raster (INT1D_BIN_8).
 * @param i_area clip rectangle limiting the trace.
 * @param i_entry_x x of the contour entry point.
 * @param i_entry_y y of the contour entry point.
 * @param o_coord receives the contour coordinates.
 * @return result of the underlying contour implementation.
 */
public bool getContour(NyARBinRaster i_raster, NyARIntRect i_area, int i_entry_x, int i_entry_y, NyARIntCoordinates o_coord)
{
    Debug.Assert(i_raster.isEqualBufferType(NyARBufferType.INT1D_BIN_8));
    // Express the clip rectangle as inclusive left/top/right/bottom bounds.
    int clip_l = i_area.x;
    int clip_t = i_area.y;
    int clip_r = i_area.x + i_area.w - 1;
    int clip_b = i_area.h + i_area.y - 1;
    return impl_getContour(i_raster, clip_l, clip_t, clip_r, clip_b, 0, i_entry_x, i_entry_y, o_coord);
}
/**
 * Binarizes an RGB raster into the output binary raster using the current
 * threshold value.
 * @param i_input source RGB raster; must be the same size as i_output.
 * @param i_output destination binary raster.
 */
public void doFilter(INyARRgbRaster i_input, NyARBinRaster i_output)
{
    // Contract: both rasters share the same dimensions.
    Debug.Assert(i_input.getSize().isEqualSize(i_output.getSize()) == true);
    NyARIntSize out_size = i_output.getSize();
    this._do_threshold_impl.doThFilter(i_input, i_output, out_size, this._threshold);
}
/**
 * Thresholds the whole input raster into the output binary raster.
 * @param i_input source RGB raster.
 * @param i_output destination binary raster.
 */
public void doFilter(INyARRgbRaster i_input, NyARBinRaster i_output)
{
    // Process the full frame: region origin (0,0) with the input's extent.
    NyARIntSize full = i_input.getSize();
    int w = full.w;
    int h = full.h;
    this._do_threshold_impl.doThFilter(i_input, 0, 0, w, h, this._threshold, i_output);
}
/**
 * One-time initialization for the Id-marker detector: builds the contour
 * detector, transform solver, square callback, working binary raster,
 * encoder work data, binarization filter and threshold analyzer.
 * @param i_param camera/screen parameters.
 * @param i_encoder encoder that creates and decodes Id-marker data.
 * @param i_marker_width physical width of the marker square.
 * @param i_raster_format pixel format of the rasters this detector will process.
 */
protected void initInstance(NyARParam i_param, INyIdMarkerDataEncoder i_encoder, double i_marker_width, int i_raster_format)
{
    // Guard against double initialization.
    Debug.Assert(this._initialized == false);
    NyARIntSize screen = i_param.getScreenSize();
    // Analysis objects.
    this._square_detect = new NyARSquareContourDetector_Rle(screen);
    this._transmat = new NyARTransMat(i_param);
    this._callback = new DetectSquareCB(i_param, i_encoder);
    // Working binary raster, sized to the screen.
    this._bin_raster = new NyARBinRaster(screen.w, screen.h);
    // Work data object for the encoder.
    this._data_current = i_encoder.createDataInstance();
    this._tobin_filter = new NyARRasterFilter_ARToolkitThreshold(110, i_raster_format);
    this._threshold_detect = new NyARRasterThresholdAnalyzer_SlidePTile(15, i_raster_format, 4);
    this._initialized = true;
    this._is_active = false;
    this._offset = new NyARRectOffset();
    this._offset.setSquare(i_marker_width);
}
/**
 * Thresholds only the given rectangular region of the input raster into the
 * output binary raster.
 * @param i_input source RGB raster.
 * @param i_area region of the input to process.
 * @param i_output destination binary raster.
 */
public void doFilter(INyARRgbRaster i_input, NyARIntRect i_area, NyARBinRaster i_output)
{
    // Unpack the region and forward to the threshold implementation.
    int rx = i_area.x;
    int ry = i_area.y;
    this._do_threshold_impl.doThFilter(i_input, rx, ry, i_area.w, i_area.h, this._threshold, i_output);
}
/// <summary>
/// Initializes the detector for single marker detection.
/// </summary>
/// <param name="width">The width of the buffer that will be used for detection.</param>
/// <param name="height">The height of the buffer that will be used for detection.</param>
/// <param name="nearPlane">The near view plane of the frustum.</param>
/// <param name="farPlane">The far view plane of the frustum.</param>
/// <param name="markers">A list of markers that should be detected.</param>
/// <param name="bufferType">The type of the buffer.</param>
/// <param name="adaptive">Performs an adaptive bitmap thresholding if set to true. Default = false.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="markers"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="markers"/> is empty or mixes segment sizes.</exception>
protected void Initialize(int width, int height, double nearPlane, double farPlane, IList<Marker> markers, int bufferType, bool adaptive = false)
{
    // Check arguments. The original threw ArgumentNullException for an empty
    // (non-null) list, which misreports the failure; report it as ArgumentException.
    if (markers == null)
    {
        throw new ArgumentNullException("markers");
    }
    if (!markers.Any())
    {
        throw new ArgumentException("At least one marker is required.", "markers");
    }

    // Member init
    this.bufferWidth = width;
    this.bufferHeight = height;
    this.isAdaptive = adaptive;

    // Init pattern matchers with markers and check segment size, which has to be equal for all markers
    int segmentX = markers[0].SegmentsX;
    int segmentY = markers[0].SegmentsY;
    var patternMatchers = new List<PatternMatcher>(markers.Count);
    foreach (var marker in markers)
    {
        if (marker.SegmentsX != segmentX || marker.SegmentsY != segmentY)
        {
            throw new ArgumentException("The Segment size has to be equal for all markers. Don't mix 16x16 and 32x32 markers for example.", "markers");
        }
        patternMatchers.Add(new PatternMatcher(marker));
    }

    // Load default camera calibration data. Path.Combine keeps the path valid
    // regardless of the platform's directory separator, and the using block
    // ensures the reader (and its underlying stream) is disposed.
    string location = System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location);
    string calibrationPath = System.IO.Path.Combine(location, "Content", "Data", "Camera_Calibration_1280x720.dat");
    var cameraParameters = new NyARParam();
    using (var reader = new StreamReader(calibrationPath))
    {
        cameraParameters.loadARParam(reader.BaseStream);
        cameraParameters.changeScreenSize(width, height);
    }

    //var asmName = new System.Reflection.AssemblyName(System.Reflection.Assembly.GetExecutingAssembly().FullName).Name;
    //var uri = new Uri(asmName + ";component/Assets/data/Camera_Calibration_1280x720.dat", UriKind.Relative);
    //var streamResInfoCam = Application.GetResourceStream(uri);
    //if (null == streamResInfoCam)
    //    throw new FileNotFoundException("Application.GetResourceStream returned null", uri.OriginalString);
    //var cameraParameters = new NyARParam();
    //using (var cameraCalibrationDataStream = streamResInfoCam.Stream)
    //{
    //    cameraParameters.loadARParam(cameraCalibrationDataStream);
    //    cameraParameters.changeScreenSize(width, height);
    //}

    // Get projection matrix from camera calibration data
    this.Projection = cameraParameters.GetCameraFrustumRH(nearPlane, farPlane);

    // Init detector and necessary data
    var colorPattern = new NyARColorPatt_Perspective_O2(segmentX, segmentY, 4, 25);
    var patternMatchDeviationData = new NyARMatchPattDeviationColorData(segmentX, segmentY);
    this.squareDetector = new NyARSquareContourDetector_Rle(cameraParameters.getScreenSize());
    this.squareDetectionListener = new SquareDetectionListener(patternMatchers, cameraParameters, colorPattern, patternMatchDeviationData);

    // Init buffer members
    this.filteredBuffer = new NyARBinRaster(width, height);
    if (adaptive)
    {
        this.bufferFilter = new NyARRasterFilter_AdaptiveThreshold(bufferType);
    }
    else
    {
        this.bufferFilter = new NyARRasterFilter_ARToolkitThreshold(this.Threshold, bufferType);
    }
}
/// <summary>
/// Shared initialization for pattern-based marker detection: stores the
/// supplied detector/transform/filter instances and allocates the work
/// buffers sized to the camera's screen.
/// </summary>
/// <param name="i_patt_inst">Color pattern sampler for the detection callback.</param>
/// <param name="i_sqdetect_inst">Square contour detector to use.</param>
/// <param name="i_transmat_inst">Transform-matrix calculator to use.</param>
/// <param name="i_filter">RGB-to-binary filter to use.</param>
/// <param name="i_ref_param">Camera parameters (screen size is read from here).</param>
/// <param name="i_ref_code">Marker code compared by the detection callback.</param>
/// <param name="i_marker_width">Physical marker edge length for the offset square.</param>
protected void initInstance(
    INyARColorPatt i_patt_inst,
    NyARSquareContourDetector i_sqdetect_inst,
    INyARTransMat i_transmat_inst,
    INyARRasterFilter_Rgb2Bin i_filter,
    NyARParam i_ref_param,
    NyARCode i_ref_code,
    double i_marker_width)
{
    NyARIntSize screen = i_ref_param.getScreenSize();
    // Analysis objects are supplied by the caller; keep the references.
    this._square_detect = i_sqdetect_inst;
    this._transmat = i_transmat_inst;
    this._tobin_filter = i_filter;
    // Allocate the binary image buffer matching the screen size.
    this._bin_raster = new NyARBinRaster(screen.w, screen.h);
    // Square-detection callback bound to the pattern, code and camera params.
    this._detect_cb = new DetectSquareCB(i_patt_inst, i_ref_code, i_ref_param);
    // Vertex offset table for the physical marker size.
    this._offset = new NyARRectOffset();
    this._offset.setSquare(i_marker_width);
}
/// <summary>
/// Traces a contour over the whole raster starting at the entry pixel and
/// writes up to <paramref name="i_array_size"/> coordinate pairs into the
/// output arrays.
/// </summary>
/// <param name="i_raster">Binarized raster (INT1D_BIN_8 buffer required).</param>
/// <param name="i_entry_x">X of the entry pixel.</param>
/// <param name="i_entry_y">Y of the entry pixel.</param>
/// <param name="i_array_size">Capacity of the coordinate arrays.</param>
/// <param name="o_coord_x">Receives the X coordinates of the contour.</param>
/// <param name="o_coord_y">Receives the Y coordinates of the contour.</param>
/// <returns>Number of coordinate pairs produced by the implementation.</returns>
public int getContour(NyARBinRaster i_raster, int i_entry_x, int i_entry_y, int i_array_size, int[] o_coord_x, int[] o_coord_y)
{
    // This pickup routine only supports INT1D_BIN_8 buffers.
    Debug.Assert(i_raster.isEqualBufferType(NyARBufferType.INT1D_BIN_8));
    return impl_getContour(i_raster, 0, i_entry_x, i_entry_y, i_array_size, o_coord_x, o_coord_y);
}
/**
 * Function based on arDetectMarker2.
 * Obtains the parameters of NyARSquare elements except for direction and
 * reports them; the direction is NOT finalized here.
 * NOTE(review): the original doc referenced an "o_square_stack" output list,
 * but the current signature reports results through i_callback — confirm
 * against the interface this overrides.
 * @param i_raster
 * Binarized raster image to analyze.
 * @param i_callback
 * Receiver notified once per extracted square candidate.
 * @throws NyARException
 */
public override void detectMarkerCB(NyARBinRaster i_raster, IDetectMarkerCallback i_callback)
{
    NyARLabelingImage limage = this._limage;
    // Stop here if the label count is 0.
    int label_num = this._labeling.labeling(i_raster, this._limage);
    if (label_num < 1)
    {
        return;
    }
    NyARLabelingLabelStack stack = limage.getLabelStack();
    // Sort the labels (by area; larger labels come first).
    stack.sortByArea();
    //
    NyARLabelingLabel[] labels = stack.getArray();
    // Skip past the labels that are too large to be candidates.
    int i;
    for (i = 0; i < label_num; i++)
    {
        // Ignore labels until they fall within the examined size range.
        if (labels[i].area <= AR_AREA_MAX)
        {
            break;
        }
    }
    // Cache frequently used fields in locals for the scan loop.
    int xsize = this._width;
    int ysize = this._height;
    int[] xcoord = this._xcoord;
    int[] ycoord = this._ycoord;
    int coord_max = this._max_coord;
    int[] mkvertex = this.__detectMarker_mkvertex;
    LabelOverlapChecker<NyARLabelingLabel> overlap = this._overlap_checker;
    // Set the maximum number of labels on the overlap checker.
    overlap.setMaxLabels(label_num);
    for (; i < label_num; i++)
    {
        NyARLabelingLabel label_pt = labels[i];
        int label_area = label_pt.area;
        // Labels are sorted by area, so once one is below the minimum, stop.
        if (label_area < AR_AREA_MIN)
        {
            break;
        }
        // Exclude labels whose clip region touches the screen frame.
        if (label_pt.clip_l == 1 || label_pt.clip_r == xsize - 2)
        {// if(wclip[i*4+0] == 1 || wclip[i*4+1] ==xsize-2){
            continue;
        }
        if (label_pt.clip_t == 1 || label_pt.clip_b == ysize - 2)
        {// if( wclip[i*4+2] == 1 || wclip[i*4+3] ==ysize-2){
            continue;
        }
        // Check overlap against rectangles already detected in this pass.
        if (!overlap.check(label_pt))
        {
            // Looks like it overlaps; skip it.
            continue;
        }
        // Extract the contour.
        int coord_num = _cpickup.getContour(limage, limage.getTopClipTangentX(label_pt), label_pt.clip_t, coord_max, xcoord, ycoord);
        if (coord_num == coord_max)
        {
            // The contour is too large (buffer filled); skip it.
            continue;
        }
        // Check the contour and decide whether it is a rectangle; if so, the
        // four vertex indexes are stored into mkvertex.
        if (!this._coord2vertex.getVertexIndexes(xcoord, ycoord, coord_num, label_area, mkvertex))
        {
            // Could not obtain the vertices.
            continue;
        }
        // Notify through the callback that a rectangle was found.
        i_callback.onSquareDetect(this, xcoord, ycoord, coord_num, mkvertex);
        // Register the detected rectangle's label with the overlap checker.
        overlap.push(label_pt);
    }
    return;
}