// Performs one tracking step from a freshly captured image: the capture is
// registered against the stored background, their difference is segmented
// into clusters (blobs), and those clusters are matched to the colonies
// already being tracked. Updates Tracking_Images, events and step.
public void newStep(System.Drawing.Image img)
{
    Bitmap bmp = new Bitmap(img);

    // Capture the timestamp ONCE so the stored image tag and the clusters
    // found in this frame carry the exact same stamp (two DateTime.Now calls
    // could straddle a second boundary). "HH" is the 24-hour specifier;
    // "hh" is 12-hour and ambiguous without an AM/PM designator.
    string timestamp = DateTime.Now.ToString("HH:mm:ss");
    bmp.Tag = timestamp;
    Tracking_Images.Add(bmp);

    // Firstly we will find the best matching repositioning of the images by selecting
    // the one that minimizes the edges difference. This is needed because of the
    // Sprout's image acquisition imprecision, which leads to minor mismatches
    // between the same object's captures.
    int[] positions = Matching_Robinson.RobinsonRepositioning(background, bmp);

    // Once the reposition is calculated, a difference between the taken image and
    // the background is performed. Manhattan difference computation is used here;
    // Euclidean distance and Pearson's correlation index difference computations
    // are also included in the code and ready to be used.
    Map resultDifference = DifferenceComputation.getManhattan(background, bmp, positions);

    // A cluster search is computed over the difference map.
    List<Cluster> frameBlobs = FindObjects(resultDifference, timestamp);

    // If this is the first tracking step made, the found clusters are set as base ones;
    // otherwise a tracking algorithm matching current clusters with the already
    // monitored ones is performed.
    if (track.isEmpty())
    {
        track.firstScan(frameBlobs, step);
        events = new int[0];
    }
    else
    {
        events = track.assignBlobs(frameBlobs, step);
    }
    step++;
    events = checkBounds(events, track.getLast(), bmp);
}
// In each step, colony tracking is performed by comparing the current taken image with the
// first one (serving as background) and the last one in the look for changes.
// Worker-thread overload: param is an object[] holding { Bitmap capture, CaptureWindow owner }.
// UI updates are marshalled back through the application dispatcher.
public void newStep(object param)
{
    // Crude mutual exclusion: wait until any previous analysis finishes.
    // NOTE(review): this check-then-set is not atomic, so two threads could in
    // principle both enter; a lock/SemaphoreSlim around the whole body would be
    // safer — confirm whether callers can actually invoke this concurrently.
    while (analysisInProgress)
    {
        Thread.Sleep(1000);
    }
    analysisInProgress = true;
    try
    {
        object[] objects = (object[])param;
        System.Drawing.Bitmap bmp = (System.Drawing.Bitmap)objects[0];
        CaptureWindow cw = (CaptureWindow)objects[1];

        // Capture the timestamp ONCE so the stored image tag and this frame's
        // clusters carry the exact same stamp. "HH" is the 24-hour specifier;
        // "hh" is 12-hour and ambiguous without an AM/PM designator.
        string timestamp = DateTime.Now.ToString("HH:mm:ss");
        bmp.Tag = timestamp;
        Tracking_Images.Add(bmp);

        // Firstly we will find the best matching repositioning of the images by selecting
        // the one that minimizes the edges difference. This is needed because of the
        // Sprout's image acquisition imprecision, which leads to minor mismatches
        // between the same object's captures.
        int[] positions = Matching_Robinson.RobinsonRepositioning(background, bmp);

        // Once the reposition is calculated, a difference between the taken image and
        // the background is performed. Manhattan difference computation is used here;
        // Euclidean distance and Pearson's correlation index difference computations
        // are also included in the code and ready to be used.
        Map resultDifference = DifferenceComputation.getManhattan(background, bmp, positions);

        // A cluster search is computed over the difference map.
        List<Cluster> frameBlobs = FindObjects(resultDifference, timestamp);

        // If this is the first tracking step made, the found clusters are set as base ones;
        // otherwise a tracking algorithm matching current clusters with the already
        // monitored ones is performed.
        if (track.isEmpty())
        {
            track.firstScan(frameBlobs, step);
            events = new int[0];
        }
        else
        {
            events = track.assignBlobs(frameBlobs, step);
        }
        events = checkBounds(events, track.getLast(), bmp);

        // Optional per-step classification, only when enabled and healthy.
        if (aw.getPicker().classAnalysis && !aw.getClass().hasError())
        {
            aw.getClass().newStep(track.getLast(), bmp);
            App.Current.Dispatcher.BeginInvoke(
                System.Windows.Threading.DispatcherPriority.ApplicationIdle,
                new Action(() => cw.processClassification()));
        }
        step++;
        App.Current.Dispatcher.BeginInvoke(
            System.Windows.Threading.DispatcherPriority.ApplicationIdle,
            new Action(() => cw.processEvents()));
    }
    finally
    {
        // Release the flag even if tracking throws: without this finally, an
        // exception would leave analysisInProgress stuck at true and every
        // subsequent step would spin forever in the wait loop above.
        analysisInProgress = false;
    }
}