private void CaptureOnImageGrabbed(object sender, EventArgs eventArgs)
{
    // Playback handler: updates the progress/time display, draws the current
    // frame's key points and its matches to the previous frame, then sleeps
    // so playback runs at the source frame rate.
    var capture = (Capture)sender;

    // Show time stamp.
    double timeIndex = capture.GetCaptureProperty(CapProp.PosMsec);
    ProgressTime = TimeSpan.FromMilliseconds(timeIndex).ToString("g");

    // Show frame number / fractional progress.
    double frameNumber = capture.GetCaptureProperty(CapProp.PosFrames);
    double totalFrames = capture.GetCaptureProperty(CapProp.FrameCount);
    // Guard: some containers report a frame count of 0; avoid NaN progress.
    _progress = totalFrames > 0 ? frameNumber / totalFrames : 0;
    RaisePropertyChanged("Progress");

    // Show image with key points.
    // FIX: retrieve from the capture that raised this event (was the
    // _capture field, which need not be the sender).
    var frame = new Mat();
    capture.Retrieve(frame);
    var keyFeatures = _projectFile.Model.GetKeyFeatures((int)frameNumber - 1);
    var imageFrame = new Mat();
    Features2DToolbox.DrawKeypoints(frame, keyFeatures, imageFrame,
        new Bgr(Color.DarkBlue), Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

    if (frameNumber > 1)
    {
        // Overlay the matches against the previous frame in red.
        var matches = _projectFile.Model.GetMatches((int)frameNumber - 1);
        foreach (var match in matches)
        {
            CvInvoke.Line(imageFrame,
                Point.Round(match.Item1.Point),
                Point.Round(match.Item2.Point),
                new Bgr(Color.Red).MCvScalar, 2);
        }
    }

    // The raw frame has been composited into imageFrame; release it now
    // instead of waiting for the finalizer (native memory).
    frame.Dispose();
    OriginImage = VideoImageSource = imageFrame;

    // Wait to display correct framerate.
    // NOTE: this blocks the grab thread, which is how Emgu's event-driven
    // capture is throttled here.
    var frameRate = capture.GetCaptureProperty(CapProp.Fps);
    var rightElapsedMilliseconds = 1000.0 / frameRate;
    var realElapsedMilliseconds = _stopwatch.ElapsedMilliseconds;
    var waitingMilliseconds = Math.Max(0, rightElapsedMilliseconds - realElapsedMilliseconds);
    Thread.Sleep((int)waitingMilliseconds);
    _stopwatch.Restart();

    if (frameNumber == totalFrames)
    {
        Stop();
    }
}
public void ShowPointInformation(double x, double y)
{
    // Shows chain information (first/current/last frame) for the key point
    // nearest to the clicked location (x, y) and highlights that point on
    // the displayed frame.
    if (_capture == null)
    {
        return;
    }

    int frameIndex = (int)_capture.GetCaptureProperty(CapProp.PosFrames) - 1;
    if (frameIndex == -1)
    {
        // No frame has been grabbed yet.
        return;
    }

    var keyFeaturesVector = _projectFile.Model.GetKeyFeatures(frameIndex);
    var nearestKeyFeature = GetNearestKeyPoint(x, y, keyFeaturesVector);
    var keyIndex = keyFeaturesVector.FirstIndexOf(keyFeature => keyFeature.Point == nearestKeyFeature.Point);

    // Walk the chain to find the first and last frame this point appears in.
    var chain = _projectFile.Model.GetChain(frameIndex, keyIndex);
    int firstFrame = frameIndex;
    int lastFrame = frameIndex;
    foreach (var pair in chain)
    {
        firstFrame = Math.Min(firstFrame, pair.Item1);
        lastFrame = Math.Max(lastFrame, pair.Item1);
    }
    PointInformation = String.Format(Strings.PointInformationFormat, firstFrame, frameIndex, lastFrame);

    // FIX: guard against a click arriving before any frame has been
    // rendered — OriginImage.Clone() would otherwise throw NRE.
    if (OriginImage == null)
    {
        return;
    }
    var image = (IImage)OriginImage.Clone();
    CvInvoke.Circle(image, Point.Round(nearestKeyFeature.Point), 5, new Bgr(Color.Yellow).MCvScalar, 2);
    VideoImageSource = image;
}
private void LinearSmoothMove(Point newPosition, int steps)
{
    // Animates the mouse cursor from its current location to newPosition
    // along a straight line, pausing 10 ms between each of the given steps.
    Point origin = new Point(System.Windows.Forms.Cursor.Position.X, System.Windows.Forms.Cursor.Position.Y);

    // Per-step displacement along the segment origin -> newPosition.
    float stepX = (newPosition.X - origin.X) / (float)steps;
    float stepY = (newPosition.Y - origin.Y) / (float)steps;

    // Advance the cursor one increment at a time.
    PointF current = origin;
    for (int step = 0; step < steps; step++)
    {
        current = new PointF(current.X + stepX, current.Y + stepY);
        System.Windows.Forms.Cursor.Position = Point.Round(current);
        Thread.Sleep(10);
    }

    // Land exactly on the target to eliminate accumulated rounding drift.
    System.Windows.Forms.Cursor.Position = newPosition;
}
public void ShowContextMenuStrip(ViewTypes viewType, Graphics.Point containerLocation)
{
    // Pops up the context menu belonging to the given view region, anchored
    // at the rounded container-relative location. Regions without an
    // assigned menu show nothing.
    Point location = Point.Round(containerLocation);

    switch (viewType)
    {
        case ViewTypes.ColumnHeader:
            this.control.columnHeaderContextMenuStrip?.Show(this.control, location);
            break;
        case ViewTypes.RowHeader:
            this.control.rowHeaderContextMenuStrip?.Show(this.control, location);
            break;
        case ViewTypes.LeadHeader:
            this.control.leadHeaderContextMenuStrip?.Show(this.control, location);
            break;
        default:
            // ViewTypes.Cells and any unrecognized region use the cell menu.
            this.control.ContextMenuStrip?.Show(this.control, location);
            break;
    }
}
private void Probe(Point sourcePointRounded, bool showPixelValue, bool showVoiValue)
{
    // Builds the probe information text (location, raw pixel, modality LUT
    // and VOI LUT values) for the probed source pixel and shows it in the
    // tile's information box at the corresponding destination point.
    string probeString;

    // Default every field to "N/A"; each is overwritten below when the
    // probed point actually lies on the image.
    string coordinateString = String.Format(SR.FormatProbeInfo, SR.LabelLocation, string.Format(SR.FormatCoordinates, SR.LabelNotApplicable, SR.LabelNotApplicable));
    string pixelValueString = String.Format(SR.FormatProbeInfo, SR.LabelRawPixel, SR.LabelNotApplicable);
    string modalityLutString = String.Format(SR.FormatProbeInfo, SR.LabelModalityLut, SR.LabelNotApplicable);
    string voiLutString = String.Format(SR.FormatProbeInfo, SR.LabelVOILut, SR.LabelNotApplicable);

    try
    {
        var displayString = new StringBuilder();

        if (_selectedImageGraphic.BoundingBox.Contains(sourcePointRounded))
        {
            coordinateString = String.Format(SR.FormatProbeInfo, SR.LabelLocation, string.Format(SR.FormatCoordinates, sourcePointRounded.X, sourcePointRounded.Y));

            if (_selectedImageGraphic is GrayscaleImageGraphic)
            {
                // IDIOM: single direct cast — the preceding 'is' already
                // guarantees it succeeds (was a redundant 'as' cast).
                GrayscaleImageGraphic image = (GrayscaleImageGraphic)_selectedImageGraphic;

                int pixelValue = 0;
                GetPixelValue(image, sourcePointRounded, ref pixelValue, ref pixelValueString);
                GetModalityLutValue(image, pixelValue, ref modalityLutString);
                GetVoiLutValue(image, pixelValue, ref voiLutString);

                // the modality LUT value is always shown
                displayString.AppendLine(modalityLutString);
                if (showPixelValue)
                {
                    displayString.AppendLine(pixelValueString);
                }
                if (showVoiValue)
                {
                    displayString.AppendLine(voiLutString);
                }
            }
            else if (_selectedImageGraphic is ColorImageGraphic)
            {
                // IDIOM: single direct cast, as above.
                ColorImageGraphic image = (ColorImageGraphic)_selectedImageGraphic;

                Color color = image.PixelData.GetPixelAsColor(sourcePointRounded.X, sourcePointRounded.Y);
                string rgbFormatted = String.Format(SR.FormatRGB, color.R, color.G, color.B);
                pixelValueString = String.Format(SR.FormatProbeInfo, SR.LabelRGBPixel, rgbFormatted);
                displayString.AppendLine(pixelValueString);
            }
        }

        // show the coordinate last, cause it's probably the least interesting information
        displayString.AppendLine(coordinateString);

        probeString = displayString.ToString().Trim();
    }
    catch (Exception e)
    {
        Platform.Log(LogLevel.Error, e);
        probeString = SR.MessageProbeToolError;
    }

    // Map the probed source point into destination (screen) coordinates for
    // positioning the information box.
    var destinationPoint = Point.Round(_selectedImageGraphic.SpatialTransform.ConvertToDestination(sourcePointRounded));
    _selectedTile.InformationBox.Update(probeString, destinationPoint);
}
public Graphics.Point PointToScreen(Graphics.Point p)
{
    // Converts a container-relative point into screen coordinates via the
    // underlying control, rounding to the nearest integer pixel first.
    Point rounded = Point.Round(p);
    return this.control.PointToScreen(rounded);
}
private void CaptureOnImageGrabbed(object sender, EventArgs eventArgs)
{
    // Feature-generation pass: for each grabbed frame, detect key points,
    // compute their descriptors, match them against the previous frame, and
    // record the accepted matches in _tempCloudPoints. A preview image with
    // key points (blue) and match lines (red) is published per frame.
    var capture = (Capture)sender;
    var frame = new Mat();
    capture.Retrieve(frame);

    // 1. get key points
    var keyPoints = new VectorOfKeyPoint(_detector.Detect(frame));
    _tempCloudPoints.SetKeyFeatures(_selectedFrameIndex, keyPoints);

    // 2. get descripters
    var descripters = new Mat();
    _descripter.Compute(frame, keyPoints, descripters);

    // draw keypoints
    var imageFrame = new Mat();
    Features2DToolbox.DrawKeypoints(frame, keyPoints, imageFrame, new Bgr(Color.DarkBlue), Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

    // Matching needs a previous frame, so the very first frame only records
    // its key points/descriptors.
    if (_selectedFrameIndex != 0)
    {
        var previousKeyPoints = _tempCloudPoints.GetKeyFeatures(_selectedFrameIndex - 1);
        var previousKeyDescripters = _previousDescripters;

        const int k = 2;
        const double uniquenessThreshold = 0.8;

        // 3. compute all matches with previous frame
        var matches = new VectorOfVectorOfDMatch();
        var matcher = GetNativeMatcher(SelectedMatcher);
        matcher.Add(previousKeyDescripters);
        matcher.KnnMatch(descripters, matches, k, null);

        // Mask starts fully enabled (255); the voting/homography passes
        // below clear the entries they reject.
        var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
        mask.SetTo(new MCvScalar(255));
        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
        Features2DToolbox.VoteForSizeAndOrientation(previousKeyPoints, keyPoints, matches, mask, 1.5, 20);
        Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(previousKeyPoints, keyPoints, matches, mask, 2);

        var managedMask = mask.GetData();

        // 4. separate good matches
        var currentKeys = keyPoints;
        for (int i = 0; i < matches.Size; i++)
        {
            var match = matches[i][0];
            // filter wrong matches
            // NOTE(review): the mask is initialised to 255 above, so kept
            // entries would normally read 255, not 1 — confirm against the
            // Emgu CV voting helpers whether this should be '!= 0'; as
            // written it only accepts entries some pass rewrote to 1.
            if (managedMask[i] == 1)
            {
                var previousIndex = match.TrainIdx;
                var currentIndex = match.QueryIdx;
                var previousPoint = previousKeyPoints[previousIndex].Point;
                var currentPoint = currentKeys[currentIndex].Point;

                // Link the matched key points across the two frames so they
                // form one tracked chain.
                _tempCloudPoints.Unite(_selectedFrameIndex - 1, previousIndex, _selectedFrameIndex, currentIndex);

                CvInvoke.Line(imageFrame, Point.Round(previousPoint), Point.Round(currentPoint), new Bgr(Color.Red).MCvScalar, 2);
            }
        }
    }

    // Keep this frame's descriptors for matching against the next frame.
    _previousDescripters = descripters;
    PreviewImageSource = imageFrame;

    _selectedFrameIndex++;
    RaisePropertyChanged("Progress");
    RaisePropertyChanged("ProgressText");

    if (_selectedFrameIndex == _framesCount)
    {
        GeneratingStates = FeatureGeneratingStates.Finished;
    }
}