/// <summary>
/// Builds a FREAK feature-set file (fset3) from an image-set file.
/// For every reference image: builds a binomial scale-space pyramid, detects DoG
/// feature points, extracts FREAK descriptors, and records each feature with its
/// 2D pixel coordinate and 3D millimeter coordinate.
/// </summary>
/// <param name="i_iset_file">Source image set; every item is processed.</param>
/// <returns>A new feature-set file containing one page that covers all images.</returns>
public static NyARNftFreakFsetFile genFeatureSet3(NyARNftIsetFile i_iset_file)
{
    const int MAX_FEATURES = 500;
    DogFeaturePointStack dog_points = new DogFeaturePointStack(MAX_FEATURES);
    FreakFeaturePointStack freak_points = new FreakFeaturePointStack(MAX_FEATURES);
    List<NyARNftFreakFsetFile.RefDataSet> ref_sets = new List<NyARNftFreakFsetFile.RefDataSet>();
    List<NyARNftFreakFsetFile.ImageInfo> image_infos = new List<NyARNftFreakFsetFile.ImageInfo>();
    for (int img_no = 0; img_no < i_iset_file.items.Length; img_no++)
    {
        NyARNftIsetFile.ReferenceImage rimg = i_iset_file.items[img_no];
        // Pipeline objects are rebuilt per image because their sizes depend on the
        // image dimensions.
        FREAKExtractor extractor = new FREAKExtractor();
        int n_octaves = BinomialPyramid32f.octavesFromMinimumCoarsestSize(rimg.width, rimg.height, 8);
        BinomialPyramid32f pyramid = new BinomialPyramid32f(rimg.width, rimg.height, n_octaves, 3);
        DoGScaleInvariantDetector detector = new DoGScaleInvariantDetector(rimg.width, rimg.height, n_octaves, 3, 3, 4, MAX_FEATURES);
        // Build the scale-space pyramid from the 8-bit grayscale source image.
        pyramid.build(NyARGrayscaleRaster.createInstance(rimg.width, rimg.height, NyARBufferType.INT1D_GRAY_8, rimg.img));
        // Detect DoG feature points.
        dog_points.clear();
        detector.detect(pyramid, dog_points);
        // Extract FREAK descriptors at the detected points.
        freak_points.clear();
        extractor.extract(pyramid, dog_points, freak_points);
        for (int i = 0; i < freak_points.getLength(); i++)
        {
            FreakFeaturePoint fp = freak_points.getItem(i);
            NyARNftFreakFsetFile.RefDataSet rds = new NyARNftFreakFsetFile.RefDataSet();
            rds.pageNo = 1; // single-page output; matches the PageInfo built below
            rds.refImageNo = img_no;
            rds.coord2D.setValue(fp.x, fp.y);
            // Pixel -> millimeter conversion (25.4 mm per inch); the y axis is
            // flipped so the 3D origin sits at the bottom of the image.
            rds.coord3D.setValue(
                (fp.x + 0.5f) / rimg.dpi * 25.4f,
                ((rimg.height - 0.5f) - fp.y) / rimg.dpi * 25.4f);
            rds.featureVec.angle = fp.angle;
            rds.featureVec.maxima = fp.maxima ? 1 : 0;
            rds.featureVec.scale = fp.scale;
            fp.descripter.getValueLe(rds.featureVec.v);
            ref_sets.Add(rds);
        }
        image_infos.Add(new NyARNftFreakFsetFile.ImageInfo(rimg.width, rimg.height, img_no));
    }
    NyARNftFreakFsetFile.PageInfo[] pages = new NyARNftFreakFsetFile.PageInfo[1];
    pages[0] = new NyARNftFreakFsetFile.PageInfo(1, image_infos.ToArray());
    return new NyARNftFreakFsetFile(ref_sets.ToArray(), pages);
}
/// <summary>
/// Matches the query keyframe against every keyframe in the map and copies the
/// best-scoring set of inlier feature pairs into <paramref name="i_result"/>.
/// Pipeline per keyframe: descriptor matching, Hough similarity vote, robust
/// homography estimation, heuristic checks, inlier collection, then a second
/// homography-guided match/re-estimate pass.
/// </summary>
/// <param name="query_keyframe">FREAK feature points of the query image.</param>
/// <param name="i_keymap">Map of reference keyframes to match against.</param>
/// <param name="i_result">Receives the inlier pairs of the best keyframe.</param>
/// <returns>true if any keyframe produced at least mMinNumInliers inliers.</returns>
private bool query(FreakFeaturePointStack query_keyframe, KeyframeMap i_keymap, FeaturePairStack i_result)
{
    HomographyMat H = this._H;
    // FIX(review): the original read this._hinv into the local and then immediately
    // overwrote it with a fresh instance, leaving the cached field unused. The dead
    // read is removed; the per-call allocation is kept so behavior is unchanged.
    InverseHomographyMat hinv = new InverseHomographyMat_O1();
    int num_of_query_frame = query_keyframe.getLength();
    // Grow the double-buffered work areas so they can hold every query point.
    if (num_of_query_frame > this._tmp_pair_stack[0].getArraySize())
    {
        this._tmp_pair_stack[0] = new FeaturePairStack(num_of_query_frame + 10);
        this._tmp_pair_stack[1] = new FeaturePairStack(num_of_query_frame + 10);
    }
    int tmp_ch = 0;
    int last_inliers = 0;
    foreach (KeyValuePair<int, Keyframe> i in i_keymap)
    {
        Keyframe second = i.Value;
        FreakMatchPointSetStack ref_points = second.getFeaturePointSet();
        // Work in the currently selected buffer; it is only "kept" by flipping
        // tmp_ch when this keyframe becomes the best so far.
        FeaturePairStack match_result = this._tmp_pair_stack[tmp_ch];
        match_result.clear();
        // 1) Raw descriptor matching.
        if (this._matcher.match(query_keyframe, second, match_result) < this.mMinNumInliers)
        {
            continue;
        }
        // 2) Vote for a transformation based on the correspondences.
        if (!this.mHoughSimilarityVoting.extractMatches(match_result, second.width(), second.height()))
        {
            continue;
        }
        // 3) Estimate the homography between the two images.
        if (!this.mRobustHomography.PreemptiveRobustHomography(H, match_result, second.width(), second.height()))
        {
            continue;
        }
        if (!hinv.inverse(H))
        {
            continue;
        }
        // Reject geometrically implausible homographies.
        if (!hinv.checkHomographyHeuristics(second.width(), second.height()))
        {
            continue;
        }
        // 4) Collect inliers under the estimated homography.
        this._find_inliner.extructMatches(H, match_result);
        if (match_result.getLength() < mMinNumInliers)
        {
            continue;
        }
        // 5) Use the estimated homography to find more matches, then re-run the
        //    vote / estimate / heuristic / inlier pipeline on the enlarged set.
        match_result.clear();
        if (_matcher.match(query_keyframe, ref_points, hinv, 10, match_result) < mMinNumInliers)
        {
            continue;
        }
        if (!this.mHoughSimilarityVoting.extractMatches(match_result, second.width(), second.height()))
        {
            continue;
        }
        if (!this.mRobustHomography.PreemptiveRobustHomography(H, match_result, second.width(), second.height()))
        {
            continue;
        }
        if (!hinv.inverse(H))
        {
            continue;
        }
        if (!hinv.checkHomographyHeuristics(second.width(), second.height()))
        {
            continue;
        }
        this._find_inliner.extructMatches(H, match_result);
        // Keep this result if it beats the best so far: flipping tmp_ch parks the
        // winning buffer at index (tmp_ch+1)%2 so later iterations cannot clobber it.
        if (match_result.getLength() >= mMinNumInliers && match_result.getLength() > last_inliers)
        {
            tmp_ch = (tmp_ch + 1) % 2;
            last_inliers = match_result.getLength();
        }
    }
    // When last_inliers > 0, the best result lives in the buffer NOT currently
    // selected for writing.
    if (last_inliers <= 0)
    {
        return false;
    }
    {
        FeaturePairStack match_result = this._tmp_pair_stack[(tmp_ch + 1) % 2];
        FeaturePairStack.Item[] dest = match_result.getArray();
        for (int i = 0; i < match_result.getLength(); i++)
        {
            FeaturePairStack.Item t = i_result.prePush();
            if (t == null)
            {
                System.Console.WriteLine("Push overflow!");
                break;
            }
            t.query = dest[i].query;
            t.ref_ = dest[i].ref_;
        }
    }
    return true;
}