/// <summary>
/// Wires up the feature-matching page: the button triggers image selection and,
/// once images are loaded, matching runs on a background task.
/// </summary>
public FeatureMatchingPage()
    : base()
{
    var button = this.GetButton();
    button.Text = "Perform Feature Matching";
    button.Clicked += OnButtonClicked;

    // async void lambda is acceptable here: this is a top-level event handler.
    OnImagesLoaded += async (sender, images) =>
    {
        GetLabel().Text = "Please wait...";
        SetImage(null);

        // Task.Run replaces the old "new Task(...)" + Start() pair: same
        // thread-pool execution, clearer idiom, and no half-constructed Task.
        var result = await Task.Run(() =>
        {
            long time;
            Mat matchResult = DrawMatches.Draw(images[0], images[1], out time);
            return new Tuple<Mat, long>(matchResult, time);
        });

        // The source images are no longer needed once matching is done.
        foreach (var img in images)
        {
            img.Dispose();
        }

        // Use the awaited result directly instead of re-reading t.Result.
        SetImage(result.Item1);
        String computeDevice = CvInvoke.UseOpenCL
            ? "OpenCL: " + Ocl.Device.Default.Name
            : "CPU";
        // Argument order is intentional: {1} = device, {0} = milliseconds.
        GetLabel().Text = String.Format("Detected with {1} in {0} milliseconds.", result.Item2, computeDevice);
    };
}
/// <summary>
/// Android activity setup: on each button click, applies the stored OpenCL
/// preference, matches the bundled "box" images, and shows the drawn result.
/// </summary>
/// <param name="bundle">Saved state passed through to the base activity.</param>
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);
    AppPreference prefs = new AppPreference();
    OnButtonClick += delegate
    {
        // Honor the user's OpenCL settings before running the match.
        CvInvoke.UseOpenCL = prefs.UseOpenCL;
        String selectedDevice = prefs.OpenClDeviceName;
        if (!String.IsNullOrEmpty(selectedDevice))
        {
            CvInvoke.OclSetDefaultDevice(selectedDevice);
        }

        long elapsedMs;
        using (Mat modelImage = Assets.GetMat("box.png"))
        using (Mat sceneImage = Assets.GetMat("box_in_scene.png"))
        using (Mat drawnMatches = DrawMatches.Draw(modelImage, sceneImage, out elapsedMs))
        {
            SetImageBitmap(drawnMatches.ToBitmap(Bitmap.Config.Rgb565));

            String deviceLabel;
            if (CvInvoke.UseOpenCL)
            {
                deviceLabel = "OpenCL: " + Emgu.CV.Ocl.Device.Default.Name;
            }
            else
            {
                deviceLabel = "CPU";
            }
            SetMessage(String.Format("Matched with '{0}' in {1} milliseconds.", deviceLabel, elapsedMs));
        }
    };
}
/// <summary>
/// Button handler: lets the user pick a model and an observed image, runs
/// feature matching on a background task, and displays the result.
/// async void is acceptable here because this is a top-level event handler.
/// </summary>
private async void OnButtonClicked(Object sender, EventArgs args)
{
    Mat[] images = await LoadImages(
        new String[] { "box.png", "box_in_scene.png" },
        new string[] { "Pick a model image from", "Pick a observed image from" });
    if (images == null || images[0] == null || images[1] == null)
    {
        return; // user cancelled or an image failed to load
    }

    SetMessage("Please wait...");
    SetImage(null);

    // Task.Run replaces the old "new Task(...)" + Start() pair — same
    // thread-pool execution, clearer idiom.
    var result = await Task.Run(() =>
    {
        long time;
        Mat matchResult = DrawMatches.Draw(images[0], images[1], out time);
        return new Tuple<Mat, long>(matchResult, time);
    });

    // Source images are no longer needed once the match drawing exists.
    foreach (var img in images)
    {
        img.Dispose();
    }

    // Use the awaited result directly instead of re-reading t.Result.
    SetImage(result.Item1);
    String computeDevice = CvInvoke.UseOpenCL
        ? "OpenCL: " + Ocl.Device.Default.Name
        : "CPU";
    // Argument order is intentional: {1} = device, {0} = milliseconds.
    SetMessage(String.Format("Detected with {1} in {0} milliseconds.", result.Item2, computeDevice));
}
/// <summary>
/// iOS view setup: on "Match", runs KAZE-based matching on the bundled box
/// images, scales the drawing to fit the view (preserving aspect), and shows it.
/// </summary>
public override void ViewDidLoad()
{
    base.ViewDidLoad();
    ButtonText = "Match";
    base.OnButtonClick += delegate
    {
        long elapsed;
        Size viewSize = FrameSize;
        using (Mat template = CvInvoke.Imread("box.png", Emgu.CV.CvEnum.ImreadModes.Grayscale))
        using (Mat scene = CvInvoke.Imread("box_in_scene.png", Emgu.CV.CvEnum.ImreadModes.Grayscale))
        using (Emgu.CV.Features2D.KAZE detector = new Emgu.CV.Features2D.KAZE())
        using (Mat matchView = DrawMatches.Draw(template, scene, detector, out elapsed))
        using (Mat fitted = new Mat())
        {
            // Uniform scale: the smaller of the two axis ratios keeps the
            // whole drawing inside the view without distortion.
            double scaleX = ((double)viewSize.Width) / matchView.Width;
            double scaleY = ((double)viewSize.Height) / matchView.Height;
            double scale = Math.Min(scaleX, scaleY);
            CvInvoke.Resize(matchView, fitted, Size.Empty, scale, scale);

            MessageText = String.Format("Matching Time: {0} milliseconds.", elapsed);
            SetImage(fitted);
        }
    };
}
/// <summary>
/// "Go" handler: restarts the background thread that streams frames from the
/// video file and tries to match the logo model against each frame.
/// </summary>
void _view_GoButtonPressed(object sender, EventArgs e)
{
    DrawMatches draw = new DrawMatches();
    // NOTE(review): hard-coded absolute path — consider moving to configuration.
    var m = new Image <Gray, Byte>(@"E:\Dev\LogoRec\LogoRec.Forms\bin\Debug\box.png");
    // NOTE(review): Thread.Abort is obsolete and throws PlatformNotSupportedException
    // on .NET Core/5+; a cooperative cancellation flag would be safer. Left as-is
    // to preserve behavior on .NET Framework.
    if (_thread != null && _thread.IsAlive)
    {
        _thread.Abort();
    }
    _thread = new Thread(() =>
    {
        // NOTE(review): hard-coded absolute media path — TODO make configurable.
        foreach (
            var img in _frameProvider.GetFrames(
                @"G:\Sin.City.A.Dame.To.Kill.For.2014.720p.BRRip.XviD.AC3.5.1\Sin.City.A.Dame.To.Kill.For.2014.720p.BRRip.XviD.AC3.5.1.avi")
        )
        {
            var a = img.Img.ToImage <Gray, byte>();
            try
            {
                var res = draw.Draw(m, a, _model.K, _model.UniquenessThreshold, _model.HessianThresh);
                _model.ViewedImage = res;
            }
            catch (Exception) // fix: exception variable was declared but never used (CS0168)
            {
                // Best-effort fallback: show the raw frame when matching fails.
                _model.ViewedImage = a.Mat;
            }
        }
    });
    _thread.Start();
}
/// <summary>
/// Matches <paramref name="image"/> against every template and records each
/// non-zero match in <paramref name="result"/>, keyed by negated score so the
/// SortedDictionary iterates best-score-first. On a key collision the score is
/// decremented until a free key is found; the value is the template's index.
/// </summary>
/// <param name="image">The image to match against the templates.</param>
/// <param name="templateContainer">Templates to try, in order.</param>
/// <param name="result">Output map of -score → template index.</param>
public static void StartProcessing(Mat image, List <TemplateContainer.ImageData> templateContainer, SortedDictionary <int, int> result)
{
    long matchTime;
    int i = 0;
    foreach (var template in templateContainer)
    {
        // Single assignment replaces the old redundant "temp = 0; temp = ..." pair.
        int temp = DrawMatches.MatchResult2(image, template, out matchTime);
        //temp = DrawMatches.MatchResult(template.image.Mat, image, out matchTime);
        if (temp != 0)
        {
            // ContainsKey is the direct dictionary lookup; Keys.Contains obscured it.
            while (result.ContainsKey(-temp))
            {
                temp -= 1;
            }
            result.Add(-temp, i);
            Console.WriteLine("znalazle, {0}", i);
        }
        i += 1;
    }
}
/// <summary>
/// Pumps frames from the capture session until ThreadFlag is cleared.
/// When checkBox1 is checked, each frame is matched against the model and the
/// drawn result plus timing is shown; otherwise the raw frame is displayed.
/// NOTE(review): this loop touches WinForms controls (imagebox1,
/// toolStripStatusLabel1, checkBox1); if it runs on a worker thread that is
/// cross-thread UI access — confirm against the caller.
/// </summary>
public void ProcessCap()
{
    //hrow new NotImplementedException();
    if (Capsession == null)
    {
        return; // no capture source configured
    }

    Capsession.Start();
    while (ThreadFlag)
    {
        // frame = Capsession.QueryFrame();
        Capsession.Retrieve(frame, 0);
        // imagebox1.Width = frame.Width * 2 / 3;
        // imagebox1.Height=frame.Height*2/3;
        if (checkBox1.Checked) // fix: redundant "== true" comparison removed
        {
            // imagebox1.Image = frame;
            result = DrawMatches.Draw(model, frame, out matchtime);
            imagebox1.Image = result;
            // NOTE(review): result.ToString() prints type information, not the
            // match outcome — likely a leftover; confirm the intended message.
            toolStripStatusLabel1.Text = string.Format("the matchtime is {0},the result is {1}", matchtime.ToString(), result.ToString());
        }
        else
        {
            imagebox1.Image = frame;
        }
    }
}
/// <summary>
/// Shows all similarities between the two selected pictures in a pop-out window.
/// Paths are built from the settings plus the file names typed (without
/// extension) in the two text boxes.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void btn_DataShow(object sender, RoutedEventArgs e)
{
    ErrInfLogger.LockInstance.InfoLog("Start of the DataShow");
    try
    {
        bool hasResults = (ResultsTable.Items.Count > 0) && (ResultsTable.Columns.Count > 0);
        if (hasResults)
        {
            string testImagePath = SettingsContainer.Instance.s_Path + SettingsContainer.Instance.s_TestImgDir + txtPhotoPath2.Text + ".jpg";
            string modelImagePath = SettingsContainer.Instance.s_Path + SettingsContainer.Instance.s_RecoImgFolder + txtPhotoPath.Text + ".jpg";
            long score;
            double matchTime;
            using (Mat modelImage = CvInvoke.Imread(modelImagePath, ImreadModes.Color))
            using (Mat observedImage = CvInvoke.Imread(testImagePath, ImreadModes.Color))
            {
                Mat matchDrawing = DrawMatches.Draw(modelImage, observedImage, out matchTime, out score);
                emImageViewer viewer = new emImageViewer(matchDrawing, score);
                viewer.Show();
            }
        }
    }
    catch (Exception ex)
    {
        // Deliberate best-effort: bad paths are reported to the user and logged.
        MessageBox.Show("DID YOU PROVIDE PROPER PATH TO TEXT BOX?\n" + "IN EXAMPLE FOR 'test.jpg' FILE YOU TYPE IN TEXT BOX JUST 'test'!");
        ErrInfLogger.LockInstance.ErrorLog(ex.ToString());
    }
    ErrInfLogger.LockInstance.InfoLog("End of the DataShow");
}
/// <summary>
/// Loads the sample image (twice, as model) and the full image for the current
/// test number, runs matching, and shows the drawn result in the image box.
/// </summary>
private void FindPointButton_Click(object sender, EventArgs e)
{
    // Fix: the three input Mats were never disposed (native memory leak on
    // every click); wrap them in using blocks.
    using (Mat modelImage = CvInvoke.Imread(PathToReadImage + "hough-" + TestNum + "-sample.jpg", ImreadModes.Grayscale))
    using (Mat modelImage2 = CvInvoke.Imread(PathToReadImage + "hough-" + TestNum + "-sample.jpg", ImreadModes.Grayscale))
    using (Mat observedImage = CvInvoke.Imread(PathToReadImage + "hough-" + TestNum + "-full.jpg", ImreadModes.Grayscale))
    {
        long matchTime;
        // The result Mat is intentionally NOT disposed: the image box keeps it.
        Mat result = DrawMatches.Draw(modelImage, modelImage2, observedImage, out matchTime);
        capturedImageBox.Image = result;
    }
}
/// <summary>
/// Matches the first two files of the selected directory and shows the drawn
/// result scaled to 400x400 in the segment box.
/// </summary>
private void button2_Click(object sender, EventArgs e)
{
    long matchTime;
    using (Mat modelImage = CvInvoke.Imread(imagen.dir.files[0], ImreadModes.Grayscale))
    using (Mat observedImage = CvInvoke.Imread(imagen.dir.files[1], ImreadModes.Grayscale))
    // Fix: the result Mat and the intermediate Image<Rgb, byte> were leaked.
    // Bitmap(Image, Size) copies the pixels, so disposing them afterwards is safe.
    using (Mat result = DrawMatches.Draw(modelImage, observedImage, out matchTime))
    using (var resultImage = result.ToImage <Rgb, byte>())
    {
        this.segmentBox.Image = new Bitmap(resultImage.Bitmap, new Size(400, 400));
    }
}
/// <summary>
/// Android activity setup: on each button click, matches the bundled "box"
/// images and displays the drawn result with the elapsed time.
/// </summary>
/// <param name="bundle">Saved state passed through to the base activity.</param>
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);
    OnButtonClick += delegate
    {
        long elapsedMs;
        using (Image <Gray, Byte> model = new Image <Gray, byte>(Assets, "box.png"))
        using (Image <Gray, Byte> scene = new Image <Gray, byte>(Assets, "box_in_scene.png"))
        using (Mat drawn = DrawMatches.Draw(model.Mat, scene.Mat, out elapsedMs))
        {
            SetImageBitmap(drawn.ToBitmap(Bitmap.Config.Rgb565));
            SetMessage(String.Format("Matched in {0} milliseconds.", elapsedMs));
        }
    };
}
/// <summary>
/// Window construction: initializes WinForms interop, matches the two demo
/// images, and displays the drawn matches with the score in an ImageViewer.
/// </summary>
public MainWindow()
{
    InitializeComponent();
    System.Windows.Forms.Application.EnableVisualStyles();
    System.Windows.Forms.Application.SetCompatibleTextRenderingDefault(false);

    long elapsedMs;
    long matchScore;
    using (Mat modelImage = CvInvoke.Imread("../../images/im2.png", ImreadModes.Grayscale))
    using (Mat observedImage = CvInvoke.Imread("../../images/im1.jpg", ImreadModes.Grayscale))
    {
        Mat rendered = DrawMatches.Draw(modelImage, observedImage, out elapsedMs, out matchScore);
        ImageViewer.Show(rendered, String.Format("Score : {0} matches", matchScore));
    }
}
/// <summary>
/// Camera button handler. Currently runs a DrawMatches smoke test and returns;
/// the photo-capture path below the return is intentionally disabled.
/// </summary>
private async void CameraButton_Clicked(object sender, EventArgs e)
{
    // Fix: the try block was empty and placed AFTER DrawMatches.Test(), so any
    // exception from Test() escaped this async void handler (which crashes the
    // app). Run the call inside the try so failures are logged instead.
    try
    {
        DrawMatches.Test();
        Console.WriteLine("Success");
    }
    catch (Exception exception)
    {
        Console.WriteLine(exception);
    }

    // NOTE(review): early return deliberately disables the camera path while
    // the matcher is under test — remove it to re-enable.
    return;

    if (Plugin.Media.CrossMedia.Current.IsCameraAvailable)
    {
        var photo = await Plugin.Media.CrossMedia.Current.TakePhotoAsync(new Plugin.Media.Abstractions.StoreCameraMediaOptions() { });
        if (photo != null)
        {
            PhotoImage.Source = ImageSource.FromStream(() => { return photo.GetStream(); });
        }
        /*
         * var stream = photo.GetStream();
         *
         * var image1 = EmguCVImageHelper.ConvertFromImageSource(stream);
         * var image2 = EmguCVImageHelper.ConvertFromImageSource(stream);
         *
         * long calTime = 0;
         * var result = DrawMatches.Draw(image1, image2, out calTime);
         *
         * PhotoImage.Source = ImageSource.FromStream(() => EmguCVImageHelper.ConvertFromEmguCVImage(result));
         */
    }
    else
    {
        // await DisplayAlert("Cannot find camera.", "Error", "OK");
    }
}
/// <summary>
/// iOS view setup: on "Match", runs feature matching on the bundled box images,
/// stretches the drawing to the view size, and displays it with the timing.
/// </summary>
public override void ViewDidLoad()
{
    base.ViewDidLoad();
    ButtonText = "Match";
    base.OnButtonClick += delegate
    {
        long elapsed;
        Size viewSize = FrameSize;
        using (Image <Gray, byte> template = new Image <Gray, byte>("box.png"))
        using (Image <Gray, byte> scene = new Image <Gray, byte>("box_in_scene.png"))
        using (Image <Bgr, Byte> matchView = DrawMatches.Draw(template, scene, out elapsed))
        using (Image <Bgr, Byte> fitted = matchView.Resize(viewSize.Width, viewSize.Height, Emgu.CV.CvEnum.Inter.Nearest, true))
        {
            MessageText = String.Format("Matching Time: {0} milliseconds.", elapsed);
            SetImage(fitted);
        }
    };
}
/// <summary>
/// The method responsible for image calculations (among others the image
/// similarity score calculation and the matching-time measurement). Also
/// appends an ImageParameters entry for the pair to l_imgList.
/// </summary>
/// <param name="s_testImage"> The test image path. </param>
/// <param name="s_basicImage"> The basic library image path. </param>
/// <returns> A pair of (score, matching time in milliseconds). </returns>
KeyValuePair <long, double> ComputeImage(string s_testImage, string s_basicImage)
{
    ErrInfLogger.LockInstance.InfoLog("Start of the ComputeImages");
    long l_score;
    double l_matchTime;
    using (Mat m_modelImage = CvInvoke.Imread(s_basicImage, ImreadModes.Color))
    {
        using (Mat m_observedImage = CvInvoke.Imread(s_testImage, ImreadModes.Color))
        {
            Mat m_homography = null;
            VectorOfKeyPoint v_modelKeyPoints = null;
            VectorOfKeyPoint v_observedKeyPoints = null;
            Mat m_mask = null;
            try
            {
                using (VectorOfVectorOfDMatch v_matches = new VectorOfVectorOfDMatch())
                {
                    DrawMatches.FindMatch(m_modelImage, m_observedImage, out l_matchTime, out v_modelKeyPoints,
                      out v_observedKeyPoints, v_matches, out m_mask, out m_homography, out l_score);
                    string s_score = "The score obtained is " + l_score.ToString();
                    string s_matchTime = "The time to obtain score is " + l_matchTime.ToString() + " ms";
                    ErrInfLogger.LockInstance.ScoreLog(s_score);
                    ErrInfLogger.LockInstance.MTimeLog(s_matchTime);
                }
            }
            finally
            {
                // Fix: the out results from FindMatch were never disposed
                // (native memory leak on every call).
                if (m_homography != null) m_homography.Dispose();
                if (m_mask != null) m_mask.Dispose();
                if (v_modelKeyPoints != null) v_modelKeyPoints.Dispose();
                if (v_observedKeyPoints != null) v_observedKeyPoints.Dispose();
            }
            // Offsets strip the configured directory prefixes from the stored paths.
            l_imgList.Add(new ImageParameters()
            {
                s_ImagePath = s_testImage.Remove(0, SettingsContainer.Instance.i_TestImageOffset),
                s_ImageBasicPath = s_basicImage.Remove(0, SettingsContainer.Instance.i_BasicImageOffset),
                l_Score = l_score,
                l_MatchTime = l_matchTime
            });
        }
    }
    ErrInfLogger.LockInstance.InfoLog("End of the ComputeImages");
    return new KeyValuePair <long, double>(l_score, l_matchTime);
}
/// <summary>
/// Runs feature matching between the template and the scanned page, purely to
/// exercise FindMatch (the outputs are computed and then released).
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void btnMatch_Click(object sender, EventArgs e)
{
    long matchTime;
    using (Mat modelImage = CvInvoke.Imread("Template.png", ImreadModes.Grayscale))
    using (Mat observedImage = CvInvoke.Imread("1456141019_地理_p1.tif", ImreadModes.Grayscale))
    {
        // Fixes: the key-point vectors were pre-instantiated only to be
        // overwritten through "out" (leaked), an unused MDMatch[][] was built
        // from the still-empty match vector, and none of the outputs were
        // ever disposed. All removed/handled below.
        VectorOfKeyPoint modelKeyPoints;
        VectorOfKeyPoint observedKeyPoints;
        Mat mask;
        Mat homography;
        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
        {
            DrawMatches.FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints,
              out observedKeyPoints, matches, out mask, out homography);
            if (homography != null) homography.Dispose();
            if (mask != null) mask.Dispose();
            if (modelKeyPoints != null) modelKeyPoints.Dispose();
            if (observedKeyPoints != null) observedKeyPoints.Dispose();
        }
    }
}
/// <summary>
/// Slides an overlapping window over the test image, matches the element image
/// inside each window, consolidates the hits, and draws/logs the final match
/// locations. Reads "{inputPath}{topic}/in/element.png" and ".../in/test.png";
/// writes gray copies, a per-window "part.jpg", "out.jpg", "log.txt" and
/// "coordinatesOnMapBlue.txt" under the same root.
/// NOTE(review): element/test/gElement/gTest/pTest images are never disposed and
/// the writers are not closed in a finally — an exception mid-run leaks them.
/// </summary>
/// <param name="inputPath">Root folder containing the "{topic}/in" inputs.</param>
/// <param name="uniquenessThresh">Uniqueness threshold forwarded to DrawMatches.Draw.</param>
/// <param name="tm">Forwarded to DrawMatches.Draw (semantics defined there — confirm).</param>
/// <param name="hessianThresh">Hessian threshold forwarded to DrawMatches.Draw.</param>
public void Apply(string inputPath, double uniquenessThresh, int tm, int hessianThresh)
{
    Stopwatch watch = Stopwatch.StartNew();
    ArrayList allMatches = new ArrayList(); // accumulates float[] stats of accepted windows
    Tuple <Image <Bgr, byte>, float[]> drawResult;
    float[] recStat;
    // Handling file names. topic is currently empty, so paths resolve directly under inputPath.
    string topic = "";
    TextWriter log = File.AppendText(inputPath + topic + "/log.txt");
    // Read images.
    Image <Bgr, Byte> element = new Image <Bgr, Byte>(string.Format("{0}{1}/in/element.png", inputPath, topic));
    Image <Bgr, Byte> test = new Image <Bgr, Byte>(string.Format("{0}{1}/in/test.png", inputPath, topic));
    Bitmap window = test.ToBitmap(); // bitmap copy used for cropping sub-windows
    // Convert to gray-level images and save.
    Image <Gray, Byte> gElement = element.Convert <Gray, Byte>();
    Image <Gray, Byte> gTest = test.Convert <Gray, Byte>();
    gElement.Save(string.Format("{0}{1}/in/g-element.png", inputPath, topic));
    gTest.Save(string.Format("{0}{1}/in/g-test.png", inputPath, topic));
    // Get image dimensions.
    int wfactor = 2; // window is wfactor× the element size in each dimension
    // The size of the element image.
    int ex = element.Width;
    int ey = element.Height;
    // The size of the test image.
    int tx = test.Width;
    int ty = test.Height;
    // The distance that the sliding window shifts is half a window, so
    // consecutive windows overlap by 50%; the -1 drops the partial last window.
    int xshift = tx / ex / wfactor * 2 - 1;
    int yshift = ty / ey / wfactor * 2 - 1;
    log.WriteLine(string.Format("Element Image: ({0}*{1})\nTest Image:({2}*{3})\n", ex, ey, tx, ty));
    for (int j = 0; j < yshift; j++)
    {
        for (int i = 0; i < xshift; i++)
        {
            int xstart = i * ex * wfactor / 2;
            int ystart = j * ey * wfactor / 2;
            int counter = i + j * xshift; // linear index of this window, used for logging
            Rectangle r = new Rectangle(xstart, ystart, ex * wfactor, ey * wfactor);
            // Crop the current window out of the full bitmap.
            Image <Bgr, Byte> pTest = new Image <Bgr, Byte>(window.Clone(r, window.PixelFormat));
            pTest.Save(string.Format("{0}{1}/in/part.jpg", inputPath, topic)); // overwritten each iteration
            drawResult = DrawMatches.Draw(gElement, pTest.Convert <Gray, Byte>(), test, xstart, ystart, inputPath, topic, counter, log, uniquenessThresh, tm, hessianThresh);
            ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
            log.WriteLine(string.Format("\n\nSub-image #{0}:\n\tLoop #({1}, {2})\n\tSW1 location: ({3}, {4})", counter, i, j, xstart, ystart));
            // Draw returns the (annotated) test image plus this window's match stats.
            test = drawResult.Item1;
            recStat = drawResult.Item2;
            // recStat layout appears to be [x, y, histogram score] — confirm in DrawMatches.Draw.
            if (recStat[2] > 0)
            {
                allMatches.Add(recStat);
                log.WriteLine(string.Format("\n\tSW2 location: ({0}, {1})\n\tHistogram score: {2}]", recStat[0], recStat[1], recStat[2]));
            }
        }
    }
    log.WriteLine("The count before consolidation: " + allMatches.Count);
    // Two consolidation passes merge hits closer than one element size.
    HashSet <float[]> hash0 = DrawMatches.Consolidate(allMatches, gElement.Width - 1, gElement.Height - 1, log);
    ArrayList al = new ArrayList();
    foreach (float[] i in hash0)
    {
        al.Add(i);
    }
    HashSet <float[]> hash = DrawMatches.Consolidate(al, gElement.Width - 1, gElement.Height - 1, log);
    log.WriteLine("The count after consolidation: " + hash.Count);
    //Blue
    TextWriter coordinatesOnMapBlue = File.AppendText(inputPath + topic + "/coordinatesOnMapBlue.txt");
    foreach (float[] i in hash)
    {
        // Mark each consolidated match on the test image and record its coordinates.
        test.Draw(new Rectangle(new Point((int)i[0], (int)i[1]), gElement.Size), new Bgr(Color.Blue), 5);
        coordinatesOnMapBlue.WriteLine("x= " + (int)i[0] + ", y=" + (int)i[1] + "");
    }
    coordinatesOnMapBlue.Close();
    test.Save(string.Format("{0}{1}/out.jpg", inputPath, topic));
    watch.Stop();
    log.WriteLine(watch.Elapsed); // total wall-clock time for the whole pass
    log.Close();
}