private static void CompareImages(Mat scene, Mat toolbarObject, Mat wordbrainObject)
{
    SURFData toolbarSurfResults = ExecuteSurfDetection(toolbarObject);
    SURFData wordbrainSurfResults = ExecuteSurfDetection(wordbrainObject);
    SURFData sceneSurfResults = ExecuteSurfDetection(scene);

    VectorOfVectorOfDMatch toolbarMatchResults = GetSceneMatchesForModel(sceneSurfResults, toolbarSurfResults);
    VectorOfVectorOfDMatch wordbrainMatchResults = GetSceneMatchesForModel(sceneSurfResults, wordbrainSurfResults);

    MKeyPoint[] sceneKeyPoints = sceneSurfResults.KeyPoints.ToArray();

    // Topmost matched point of the toolbar template (smallest Y).
    Point highestKeyPoint = toolbarMatchResults.ToArrayOfArray()
        .Select(m => Point.Round(sceneKeyPoints[m[0].QueryIdx].Point))
        .OrderBy(kp => kp.Y)
        .FirstOrDefault();

    // Bottommost matched point of the wordbrain template (largest Y).
    Point lowestKeyPoint = wordbrainMatchResults.ToArrayOfArray()
        .Select(m => Point.Round(sceneKeyPoints[m[0].QueryIdx].Point))
        .OrderByDescending(kp => kp.Y)
        .FirstOrDefault();

    // The slice runs from the topmost point down to the bottommost one, so its
    // height is the positive difference between the two Y values.
    int rectangleHeight = lowestKeyPoint.Y - highestKeyPoint.Y;

    Image<Gray, byte> sceneImage = scene.ToImage<Gray, byte>();
    Console.WriteLine(sceneImage.Width);
    Console.WriteLine(sceneImage.Height);

    Rectangle rectangle = new Rectangle(0, highestKeyPoint.Y, scene.Width, rectangleHeight);

    //sceneImage.Draw("X", highestKeyPoint, FontFace.HersheyPlain, 5, new Gray(255), thickness: 2);
    //sceneImage.Draw("X", lowestKeyPoint, FontFace.HersheyPlain, 5, new Gray(255), thickness: 2);
    //sceneImage.Draw(rectangle, new Gray(10), 5);
    //ImageViewer.Show(sceneImage);

    Image<Gray, byte> sliced = sceneImage.Copy(rectangle);
    sliced.Save("../../../characters/characters-and-clues-result.jpg");
    ImageViewer.Show(sliced);
}
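A minimal sketch of how CompareImages might be driven, assuming the scene and the two template images are loaded as grayscale Mats; the file names are placeholders, not from the original project:

Mat scene = CvInvoke.Imread("screenshot.png", ImreadModes.Grayscale);              // hypothetical path
Mat toolbar = CvInvoke.Imread("toolbar-template.png", ImreadModes.Grayscale);      // hypothetical path
Mat wordbrain = CvInvoke.Imread("wordbrain-template.png", ImreadModes.Grayscale);  // hypothetical path
CompareImages(scene, toolbar, wordbrain);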
/// <summary>
/// Filter the matched features such that if a match is not unique, it is rejected.
/// </summary>
/// <param name="matches">Matches. Each matches[i] is k or less matches for the same query descriptor.</param>
/// <param name="uniquenessThreshold">The distance ratio below which a match is considered unique; a good value is 0.8.</param>
/// <param name="mask">Both input and output. This matrix indicates which rows are valid for the matches.</param>
public static void VoteForUniqueness(VectorOfVectorOfDMatch matches, double uniquenessThreshold, Mat mask)
{
    MDMatch[][] mArr = matches.ToArrayOfArray();
    byte[] maskData = new byte[mArr.Length];
    GCHandle maskHandle = GCHandle.Alloc(maskData, GCHandleType.Pinned);
    using (Mat m = new Mat(mArr.Length, 1, DepthType.Cv8U, 1, maskHandle.AddrOfPinnedObject(), 1))
    {
        mask.CopyTo(m);
        for (int i = 0; i < mArr.Length; i++)
        {
            // Reject a match when its best distance is too close to the second-best:
            // a ratio above the threshold means the match is ambiguous.
            if (maskData[i] != 0 && (mArr[i][0].Distance / mArr[i][1].Distance) > uniquenessThreshold)
            {
                maskData[i] = (byte)0;
            }
        }
        m.CopyTo(mask);
    }
    maskHandle.Free();
}
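A minimal usage sketch for VoteForUniqueness, assuming model and observed descriptors were already computed and matched with k = 2 (the ratio test needs a best and a second-best neighbour per query descriptor):

var matcher = new BFMatcher(DistanceType.L2);
matcher.Add(modelDescriptors);                 // modelDescriptors: assumed precomputed
var matches = new VectorOfVectorOfDMatch();
matcher.KnnMatch(observedDescriptors, matches, 2, null);

Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
mask.SetTo(new MCvScalar(255));                // every row starts out valid
VoteForUniqueness(matches, 0.8, mask);         // ambiguous rows are zeroed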
public VectorOfVectorOfDMatch GetMatchesForModel(SurfData scene, SurfData model)
{
    using (FlannBasedMatcher matcher = new FlannBasedMatcher(_hierarchicalParams, _searchParams))
    {
        matcher.Add(model.Descriptors);

        VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
        matcher.KnnMatch(scene.Descriptors, matches, _kConstant, null);

        // Keep only the strongest matches, ordered by descriptor distance.
        MDMatch[][] newMatches = matches
            .ToArrayOfArray()
            .OrderBy(m => m[0].Distance)
            .Take(_matchLimit)
            .ToArray();

        VectorOfVectorOfDMatch limitMatches = new VectorOfVectorOfDMatch(newMatches);
        matches.Dispose();
        return limitMatches;
    }
}
private static VectorOfVectorOfDMatch GetSceneMatchesForModel(SURFData sceneData, SURFData modelData)
{
    using (FlannBasedMatcher matcher = new FlannBasedMatcher(new HierarchicalClusteringIndexParams(), new SearchParams()))
    {
        matcher.Add(modelData.Descriptors);

        VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
        matcher.KnnMatch(sceneData.Descriptors, matches, 1, null);

        // Keep only the eight strongest matches, ordered by descriptor distance.
        MDMatch[][] newMatches = matches
            .ToArrayOfArray()
            .OrderBy(m => m[0].Distance)
            .Take(8)
            .ToArray();

        VectorOfVectorOfDMatch limitMatches = new VectorOfVectorOfDMatch(newMatches);
        matches.Dispose();
        return limitMatches;
    }
}
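With k = 1 there is no second neighbour to compare against; if the matcher is instead queried with k = 2, the same ToArrayOfArray projection supports Lowe's ratio test. A sketch under that assumption:

// Assumes matcher.KnnMatch(..., matches, 2, null) was called instead of k = 1.
MDMatch[][] knn = matches.ToArrayOfArray();
MDMatch[][] good = knn
    .Where(m => m.Length == 2 && m[0].Distance < 0.75f * m[1].Distance) // Lowe's ratio test
    .ToArray();
VectorOfVectorOfDMatch filtered = new VectorOfVectorOfDMatch(good);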
/// <summary>
/// Matches the template image against the observed image and computes the homography.
/// </summary>
private void btnMatch_Click(object sender, EventArgs e)
{
    long matchTime;
    using (Mat modelImage = CvInvoke.Imread("Template.png", ImreadModes.Grayscale))
    using (Mat observedImage = CvInvoke.Imread("1456141019_地理_p1.tif", ImreadModes.Grayscale))
    {
        VectorOfKeyPoint vectorOfKeyPoint = new VectorOfKeyPoint();
        VectorOfKeyPoint vectorOfKeyPointObserved = new VectorOfKeyPoint();
        VectorOfVectorOfDMatch vectorOfDMatch = new VectorOfVectorOfDMatch();
        Mat mask = null;
        Mat homography = null;
        DrawMatches.FindMatch(modelImage, observedImage, out matchTime, out vectorOfKeyPoint,
            out vectorOfKeyPointObserved, vectorOfDMatch, out mask, out homography);

        // Convert the match results only after FindMatch has populated them.
        MDMatch[][] mDMatch = vectorOfDMatch.ToArrayOfArray();
    }
}
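DrawMatches.FindMatch itself is not shown here; handlers like this one usually follow Emgu's FeatureMatching sample, whose core flow looks roughly like the sketch below (an assumption about the helper, not this project's verbatim code):

var modelKeyPoints = new VectorOfKeyPoint();
var observedKeyPoints = new VectorOfKeyPoint();
var surf = new Emgu.CV.XFeatures2D.SURF(300);   // hessian threshold 300
using UMat modelDescriptors = new UMat(), observedDescriptors = new UMat();
surf.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);
surf.DetectAndCompute(observedImage, null, observedKeyPoints, observedDescriptors, false);

var matcher = new BFMatcher(DistanceType.L2);
matcher.Add(modelDescriptors);
matcher.KnnMatch(observedDescriptors, matches, 2, null);

mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
mask.SetTo(new MCvScalar(255));
Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
if (CvInvoke.CountNonZero(mask) >= 4)
{
    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
        modelKeyPoints, observedKeyPoints, matches, mask, 2);
}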
private string[] FindFeatureMatches(string sourceFile, string[] targetFiles, ObservableCollection<string> output)
{
    // Currently we are only interested in jpg files.
    targetFiles = targetFiles
        .Where(targetFile =>
        {
            var extension = Path.GetExtension(targetFile).ToLower();
            return extension == ".jpg" || extension == ".jpeg";
        })
        .ToArray();

    var matchingFiles = new List<string>();

    using var sourceImage = CvInvoke.Imread(sourceFile, ImreadModes.Grayscale);
    using var sourceMat = new GpuMat();
    sourceMat.Upload(sourceImage);

    using var sourceDescriptors = new GpuMat();
    using var detector = new CudaORBDetector();
    var sourceKeyPoints = detector.Detect(sourceMat, null);
    detector.Compute(sourceMat, new VectorOfKeyPoint(sourceKeyPoints), sourceDescriptors);

    Parallel.ForEach(targetFiles, new ParallelOptions { MaxDegreeOfParallelism = 40 }, targetFile =>
    {
        try
        {
            if (targetFile == sourceFile)
            {
                return; // No need to match the original file.
            }

            if (new FileInfo(targetFile).Length == 0) // We cannot compare empty images.
            {
                return;
            }

            using var targetImage = CvInvoke.Imread(targetFile, ImreadModes.Grayscale);
            using var targetMat = new GpuMat();
            targetMat.Upload(targetImage);

            using var targetDescriptors = new GpuMat();
            using var detector2 = new CudaORBDetector();
            var targetKeyPoints = detector2.Detect(targetMat, null);
            detector2.Compute(targetMat, new VectorOfKeyPoint(targetKeyPoints), targetDescriptors);

            // Needed to compensate for some crashes.
            // See: https://stackoverflow.com/questions/25089393/opencv-flannbasedmatcher
            if (sourceKeyPoints.Length >= 2 && targetKeyPoints.Length >= 2)
            {
                using var matches = new VectorOfVectorOfDMatch();
                using var matcher = new CudaBFMatcher(DistanceType.Hamming);
                matcher.KnnMatch(sourceDescriptors, targetDescriptors, matches, KnnMatchValue);

                // Ratio test: keep only matches whose best distance clearly beats the second-best.
                var goodPoints = matches.ToArrayOfArray()
                    .Where(match => match.Length > 1)
                    .Where(match => match[0].Distance < match[1].Distance * MatchDistance)
                    .ToArray();

                var matchCount = Math.Max(sourceKeyPoints.Length, targetKeyPoints.Length);
                var matchQuality = (float)goodPoints.Length / matchCount;

                if (matchQuality >= MinimumMatchQuality)
                {
                    lock (matchingFiles)
                    {
                        matchingFiles.Add(targetFile); // Record the match so it is actually returned.
                    }

                    using var outputImage = new Mat();
                    using var scaledOutputImage = new Mat();
                    Features2DToolbox.DrawMatches(
                        sourceImage, new VectorOfKeyPoint(sourceKeyPoints),
                        targetImage, new VectorOfKeyPoint(targetKeyPoints),
                        new VectorOfVectorOfDMatch(goodPoints), outputImage,
                        new Bgr(System.Drawing.Color.Yellow).MCvScalar,
                        new Bgr(System.Drawing.Color.Red).MCvScalar);
                    CvInvoke.Resize(outputImage, scaledOutputImage, System.Drawing.Size.Empty, 0.1f, 0.1f);
                    Application.Current?.Dispatcher?.Invoke(() => CvInvoke.Imshow("Match preview", scaledOutputImage));

                    var sb = new StringBuilder();
                    sb.AppendLine($"{DateTime.Now} Matching:");
                    sb.AppendLine($"Source: {sourceFile}");
                    sb.AppendLine($"Target: {targetFile}");
                    sb.Append($"Match found with quality: {matchQuality}");
                    output.Add(sb.ToString());
                }
            }
        }
        catch (Exception e)
        {
            var sb = new StringBuilder();
            var exception = e.ToString().Replace(Environment.NewLine, " ");
            sb.Append($"{DateTime.Now} Unable to match file: {targetFile}: {exception}");
            output.Add(sb.ToString());
        }
    });

    return matchingFiles.ToArray();
}
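The tuning fields referenced above (KnnMatchValue, MatchDistance, MinimumMatchQuality) are defined elsewhere in the class; values along these lines would be plausible, purely as an assumption for experimentation:

private const int KnnMatchValue = 2;             // hypothetical: neighbours per query descriptor
private const float MatchDistance = 0.75f;       // hypothetical: Lowe ratio threshold
private const float MinimumMatchQuality = 0.1f;  // hypothetical: fraction of good matches required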
private void FindMatches(IFeatureMatcher matcher, DescriptorModel leftDescriptor, DescriptorModel rightDescriptor,
    bool AddToList = true, bool FilterMatches = true, bool ComputeHomography = true, bool SaveInMatchNode = true)
{
    WindowsFormHelper.AddLogToConsole(
        $"Start computing matches for: \n" +
        $"\t{leftDescriptor.KeyPoint.InputFile.fileInfo.Name}\n" +
        $"\t{rightDescriptor.KeyPoint.InputFile.fileInfo.Name}\n");

    var foundedMatch = new DescriptorsMatchModel()
    {
        FilteredMatch = FilterMatches,
        LeftDescriptor = leftDescriptor,
        RightDescriptor = rightDescriptor
    };

    var matches = new VectorOfVectorOfDMatch();
    matcher.Match(leftDescriptor.Descriptors, rightDescriptor.Descriptors, matches);

    WindowsFormHelper.AddLogToConsole(
        $"FINISH computing matches for: \n" +
        $"\t{leftDescriptor.KeyPoint.InputFile.fileInfo.Name}\n" +
        $"\t{rightDescriptor.KeyPoint.InputFile.fileInfo.Name}\n");

    MDMatch[][] matchesArray = matches.ToArrayOfArray();
    foundedMatch.MatchesList = matchesArray.ToList();

    if (FilterMatches)
    {
        FindMinMaxDistInMatches(matchesArray, ref ms_MAX_DIST, ref ms_MIN_DIST);
        List<MDMatch[]> filteredMatchesList = FilterMatchesByMaxDist(matchesArray);
        foundedMatch.FilteredMatchesList = filteredMatchesList;
    }

    if (ComputeHomography)
    {
        var PerspectiveMatrix = new Mat();
        Mat Mask = new Mat();
        lock (locker)
        {
            var matchesForHomography = FilterMatches ? foundedMatch.FilteredMatchesList : foundedMatch.MatchesList;
            if (matchesForHomography.Count > 0)
            {
                PerspectiveMatrix = FindHomography(leftDescriptor.KeyPoint.DetectedKeyPoints,
                    rightDescriptor.KeyPoint.DetectedKeyPoints, matchesForHomography, Mask);
                foundedMatch.Mask = Mask;
                foundedMatch.PerspectiveMatrix = PerspectiveMatrix;
            }
        }
    }

    // Save the drawn-matches image. Use the filtered list only when it was actually computed,
    // otherwise FilteredMatchesList would be null here.
    var matchesToDraw = FilterMatches ? foundedMatch.FilteredMatchesList : foundedMatch.MatchesList;
    Mat output = new Mat();
    Directory.CreateDirectory($@"{tempDirectory}\DrawMatches");
    Features2DToolbox.DrawMatches(
        new Mat(foundedMatch.LeftDescriptor.KeyPoint.InputFile.fileInfo.FullName), foundedMatch.LeftDescriptor.KeyPoint.DetectedKeyPoints,
        new Mat(foundedMatch.RightDescriptor.KeyPoint.InputFile.fileInfo.FullName), foundedMatch.RightDescriptor.KeyPoint.DetectedKeyPoints,
        new VectorOfVectorOfDMatch(matchesToDraw.ToArray()), output,
        new MCvScalar(0, 0, 255), new MCvScalar(0, 255, 0), foundedMatch.Mask);

    var drawnMatchesPath = Path.Combine($@"{tempDirectory}\DrawMatches",
        $"{Path.GetFileNameWithoutExtension(foundedMatch.RightDescriptor.KeyPoint.InputFile.fileInfo.Name)}-" +
        $"{Path.GetFileNameWithoutExtension(foundedMatch.LeftDescriptor.KeyPoint.InputFile.fileInfo.Name)}.JPG");
    output.Save(drawnMatchesPath);

    fileManager.listViewerModel._lastDrawnMatches = new Image<Bgr, byte>(output.Bitmap);
    var inputFile = new InputFileModel(drawnMatchesPath);
    var imageList = _winForm.ImageList[(int)EListViewGroup.DrawnMatches];
    var listViewer = _winForm.ListViews[(int)EListViewGroup.DrawnMatches];
    fileManager.AddInputFileToList(inputFile, fileManager.listViewerModel.ListOfListInputFolder[(int)EListViewGroup.DrawnMatches], imageList, listViewer);

    if (SaveInMatchNode)
    {
        SaveMatchString(foundedMatch, true);
    }
    if (AddToList)
    {
        FoundedMatches.Add(foundedMatch);
    }
}
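FilterMatchesByMaxDist is referenced above but not shown; a plausible implementation, following the max-distance heuristic from the OpenCV matching tutorials, might look like this (hypothetical, not the project's actual code):

// Hypothetical helper body; ms_MIN_DIST is the field updated by FindMinMaxDistInMatches.
private List<MDMatch[]> FilterMatchesByMaxDist(MDMatch[][] matchesArray)
{
    // Keep matches whose best distance is within 3x the smallest observed distance.
    return matchesArray
        .Where(m => m[0].Distance <= Math.Max(3 * ms_MIN_DIST, 0.02f))
        .ToList();
}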
/// <summary>
/// Filter the matched features such that if a match is not unique, it is rejected.
/// </summary>
/// <param name="matches">Matches. Each matches[i] is k or less matches for the same query descriptor.</param>
/// <param name="uniquenessThreshold">The distance ratio below which a match is considered unique; a good value is 0.8.</param>
/// <param name="mask">Both input and output. This matrix indicates which rows are valid for the matches.</param>
public static void VoteForUniqueness(VectorOfVectorOfDMatch matches, double uniquenessThreshold, Mat mask)
{
    MDMatch[][] mArr = matches.ToArrayOfArray();
    byte[] maskData = new byte[mArr.Length];
    GCHandle maskHandle = GCHandle.Alloc(maskData, GCHandleType.Pinned);
    using (Mat m = new Mat(mArr.Length, 1, DepthType.Cv8U, 1, maskHandle.AddrOfPinnedObject(), 1))
    {
        mask.CopyTo(m);
        for (int i = 0; i < mArr.Length; i++)
        {
            // Keep a match only when it passes the ratio test; otherwise clear its
            // mask row so the ambiguous match is rejected.
            if (maskData[i] != 0 && (mArr[i][0].Distance / mArr[i][1].Distance) <= uniquenessThreshold)
            {
                maskData[i] = (byte)255;
            }
            else
            {
                maskData[i] = (byte)0;
            }
        }
        m.CopyTo(mask);
    }
    maskHandle.Free();
}
void NewORBDetector()
{
    // Lowe's ratio test threshold; ~0.75-0.8 is typical.
    float ms_MIN_RATIO = 0.8f;
    float ms_MAX_DIST = 100;

    (Image<Bgr, byte> Image, VectorOfKeyPoint Keypoints, Mat Descriptors) _imgModel =
        (new Image<Bgr, byte>(@"C:\Images\ImgModel.jpg").Resize(0.2, Inter.Area), new VectorOfKeyPoint(), new Mat());
    (Image<Bgr, byte> Image, VectorOfKeyPoint Keypoints, Mat Descriptors) _imgTest =
        (new Image<Bgr, byte>(@"C:\Images\ImgTest.jpg").Resize(0.2, Inter.Area), new VectorOfKeyPoint(), new Mat());

    Mat imgKeypointsModel = new Mat();
    Mat imgKeypointsTest = new Mat();
    Mat imgMatches = new Mat();
    Mat imgWarped = new Mat();

    VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
    VectorOfVectorOfDMatch filteredMatches = new VectorOfVectorOfDMatch();
    List<MDMatch[]> filteredMatchesList = new List<MDMatch[]>();

    ORBDetector _ORB = new ORBDetector();
    BFMatcher _BFMatcher = new BFMatcher(DistanceType.Hamming2);

    _ORB.DetectAndCompute(_imgModel.Image, null, _imgModel.Keypoints, _imgModel.Descriptors, false);
    _ORB.DetectAndCompute(_imgTest.Image, null, _imgTest.Keypoints, _imgTest.Descriptors, false);

    _BFMatcher.Add(_imgModel.Descriptors);
    _BFMatcher.KnnMatch(_imgTest.Descriptors, matches, k: 2, mask: null);

    MDMatch[][] matchesArray = matches.ToArrayOfArray();

    // Apply the ratio test: keep a match only when its best distance is clearly
    // smaller than the second-best distance.
    for (int i = 0; i < matchesArray.Length; i++)
    {
        float dist1 = matchesArray[i][0].Distance;
        float dist2 = matchesArray[i][1].Distance;
        if (dist1 < ms_MIN_RATIO * dist2)
        {
            filteredMatchesList.Add(matchesArray[i]);
        }
    }

    // Filter by an absolute distance threshold.
    MDMatch[][] defCopy = new MDMatch[filteredMatchesList.Count][];
    filteredMatchesList.CopyTo(defCopy);
    filteredMatchesList = new List<MDMatch[]>();
    foreach (var item in defCopy)
    {
        if (item[0].Distance < ms_MAX_DIST)
        {
            filteredMatchesList.Add(item);
        }
    }

    filteredMatches = new VectorOfVectorOfDMatch(filteredMatchesList.ToArray());

    Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
        _imgModel.Keypoints, _imgTest.Keypoints, filteredMatches, null, 10);
    CvInvoke.WarpPerspective(_imgTest.Image, imgWarped, homography, _imgTest.Image.Size);

    Features2DToolbox.DrawKeypoints(_imgModel.Image, _imgModel.Keypoints, imgKeypointsModel, new Bgr(0, 0, 255));
    Features2DToolbox.DrawKeypoints(_imgTest.Image, _imgTest.Keypoints, imgKeypointsTest, new Bgr(0, 0, 255));
    Features2DToolbox.DrawMatches(_imgModel.Image, _imgModel.Keypoints, _imgTest.Image, _imgTest.Keypoints,
        filteredMatches, imgMatches, new MCvScalar(0, 255, 0), new MCvScalar(0, 0, 255),
        null, Features2DToolbox.KeypointDrawType.Default);

    Task.Factory.StartNew(() => Emgu.CV.UI.ImageViewer.Show(imgKeypointsModel, "Keypoints Model"));
    Task.Factory.StartNew(() => Emgu.CV.UI.ImageViewer.Show(imgKeypointsTest, "Keypoints Test"));
    Task.Factory.StartNew(() => Emgu.CV.UI.ImageViewer.Show(imgMatches, "Matches"));
    Task.Factory.StartNew(() => Emgu.CV.UI.ImageViewer.Show(imgWarped, "Warp"));
}
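GetHomographyMatrixFromMatchedFeatures returns null when no valid homography can be estimated (for example, when too few matches survive filtering), so guarding the warp step is safer; a small defensive sketch of the same call:

if (homography != null)
{
    CvInvoke.WarpPerspective(_imgTest.Image, imgWarped, homography, _imgTest.Image.Size);
}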
public void run(Image<Gray, byte> image)
{
    Stopwatch sw = new Stopwatch();
    sw.Start();

    Mat mask = null;
    _imageInput = image;
    VectorOfKeyPoint observedKeyPoints = new VectorOfKeyPoint();
    VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

    Image<Gray, byte> _image = imageInput.Clone();
    for (int i = 0; i < actionAccurateSearchData.time; i++)
    {
        _image = _image.PyrDown();
    }

    if (0 != actionAccurateSearchData.InputAOIWidth && 0 != actionAccurateSearchData.InputAOIHeight)
    {
        _image.ROI = new Rectangle(actionAccurateSearchData.InputAOIX, actionAccurateSearchData.InputAOIY,
            actionAccurateSearchData.InputAOIWidth, actionAccurateSearchData.InputAOIHeight);
    }

    UMat b1 = _image.ToUMat();
    UMat observedDescriptors = new UMat();

    // Detect and compute in a single call; OpenCV's two separate steps are combined
    // here, but they can also be used independently.
    surf.DetectAndCompute(b1, null, observedKeyPoints, observedDescriptors, false);

    // Start matching against the model descriptors.
    BFMatcher matcher = new BFMatcher(DistanceType.L2Sqr);
    matcher.Add(modelDescriptors);
    matcher.KnnMatch(observedDescriptors, matches, 2, null);

    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
    mask.SetTo(new MCvScalar(255));
    Features2DToolbox.VoteForUniqueness(matches, 0.8, mask); // reject non-unique matches
    Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);

    MDMatch[][] vectorOfDMatch = matches.ToArrayOfArray();

    bool keepDetect = true;
    Mat result1 = _imageInput.Convert<Gray, byte>().Mat;
    Features2DToolbox.DrawMatches(_imageTempleAOI.Convert<Gray, byte>().Mat, modelKeyPoints,
        _image.Convert<Gray, byte>().Mat, observedKeyPoints, matches, result1,
        new MCvScalar(255, 0, 255), new MCvScalar(0, 255, 255), mask);

    actionRes = ActionResponse.NG;
    List<Position> posList;
    listPosition = new List<Position>();
    imageCon = FindSeedPoint(observedKeyPoints, vectorOfDMatch, mask, out posList);

    double maxValue = 0;
    double minValue = 0;
    Point maxPoint = new Point();
    Point minPoint = new Point();

    while (keepDetect)
    {
        CvInvoke.MinMaxLoc(imageCon, ref minValue, ref maxValue, ref minPoint, ref maxPoint);
        if (maxValue > 10)
        {
            List<double[]> angleList = new List<double[]>();
            double angleSin = 0;
            double angleCos = 0;
            double angle = 0;
            List<int> status = new List<int>();
            int count = 0;
            int totalCount = 0;

            // Collect the sin/cos of every matched point near the current peak.
            foreach (Position p in posList)
            {
                if (Math.Abs(p.X - maxPoint.X) < 16 && Math.Abs(p.Y - maxPoint.Y) < 16)
                {
                    double[] item = new double[2];
                    item[0] = p.sin;
                    item[1] = p.cos;
                    angleList.Add(item);
                    status.Add(1);
                }
            }

            // Estimate the rotation by repeatedly sampling pairs of candidates and
            // accumulating sin/cos only when the pair agrees.
            bool keepCalcurateAngle = true;
            int numberOfItem = status.Count;
            Random random = new Random();
            while (keepCalcurateAngle)
            {
                int a = -1;
                int b = -1;
                bool keepGetRd = true;
                int raCount = 0;
                while (keepGetRd)
                {
                    raCount++;
                    if (raCount > 10000)
                    {
                        throw new Exception("Angle deviation too large (1)");
                    }
                    int rd = random.Next(status.Count - 1);
                    if (0 != status[rd] && rd != a)
                    {
                        if (-1 == a)
                        {
                            a = rd;
                        }
                        else
                        {
                            b = rd;
                            keepGetRd = false;
                        }
                    }
                }

                if (Math.Abs(angleList[a][0] - angleList[b][0]) < 0.1 && Math.Abs(angleList[a][1] - angleList[b][1]) < 0.1)
                {
                    ++count;
                    angleSin = angleSin + angleList[a][0];
                    angleCos = angleCos + angleList[a][1];
                }
                else
                {
                    angleSin = 0;
                    angleCos = 0;
                    count = 0;
                    status[a] = 0;
                    status[b] = 0;
                    numberOfItem = numberOfItem - 2;
                }
                if (count > 99)
                {
                    keepCalcurateAngle = false;
                }
                if (numberOfItem < status.Count / 3 || numberOfItem < 5)
                {
                    angleSin = 0;
                    angleCos = 0;
                    count = 0;
                    keepCalcurateAngle = false;
                    //throw new Exception("Angle calculation error");
                }
                totalCount++;
                if (totalCount > 2000)
                {
                    throw new Exception("Angle deviation too large (2)");
                }
            }

            // Recover the rotation angle from the accumulated sin/cos sums.
            if (Math.Abs(angleSin) > Math.Abs(angleCos))
            {
                angle = 180 * Math.Acos(angleCos / 100) / Math.PI;
                if (angleSin < 0)
                {
                    angle = 360 - angle;
                }
            }
            else
            {
                angle = 180 * Math.Asin(angleSin / 100) / Math.PI;
                if (angleCos < 0)
                {
                    angle = 180 - angle;
                }
                else if (angle < 0)
                {
                    angle = 360 + angle;
                }
            }
            listPosition.Add(new Position(maxPoint.X, maxPoint.Y, angle, angleSin / 100, angleCos / 100));

            // Draw the rotated bounding box of the template at the found position.
            Point[] rectPoint = new Point[4];
            int CenterX = _imageTempleAOI.Width / 2;  // template size
            int CenterY = _imageTempleAOI.Height / 2;
            double sin = angleSin / 100;
            double Cos = angleCos / 100;
            rectPoint[0].X = (int)(maxPoint.X - CenterX * Cos + CenterY * sin);
            rectPoint[0].Y = (int)(maxPoint.Y - CenterY * Cos - CenterX * sin);
            rectPoint[1].X = (int)(maxPoint.X - CenterX * Cos - CenterY * sin);
            rectPoint[1].Y = (int)(maxPoint.Y + CenterY * Cos - CenterX * sin);
            rectPoint[2].X = (int)(maxPoint.X + CenterX * Cos - CenterY * sin);
            rectPoint[2].Y = (int)(maxPoint.Y + CenterY * Cos + CenterX * sin);
            rectPoint[3].X = (int)(maxPoint.X + CenterX * Cos + CenterY * sin);
            rectPoint[3].Y = (int)(maxPoint.Y - CenterY * Cos + CenterX * sin);
            VectorOfPoint vp = new VectorOfPoint(rectPoint);
            CvInvoke.Polylines(result1, vp, true, new MCvScalar(255, 0, 0, 255), 2);

            // Zero out the region around this peak so the next iteration finds the
            // next candidate position.
            int leng = Math.Min(_imageTempleAOI.Width, _imageTempleAOI.Height);
            int startPX = maxPoint.X - leng / 2;
            int startPY = maxPoint.Y - leng / 2;
            Parallel.For(0, leng, item =>
            {
                for (int i = 0; i < leng; i++)
                {
                    imageCon.Data[startPY + i, startPX + item, 0] = 0;
                }
            });
        }
        else
        {
            keepDetect = false;
        }
    }

    imageDescript = result1;
    actionRes = ActionResponse.OK;
    sw.Stop();
}