/// <summary>
/// Converts the screen-space position (ScreenX, ScreenY) into coordinates relative
/// to the given control, preserving the fractional sub-pixel part of the position.
/// </summary>
/// <param name="c">The control whose client coordinate system to convert into.</param>
/// <returns>The position relative to <paramref name="c"/>.</returns>
public System.Drawing.PointF GetRelativePosition(Control c)
{
    // PointToClient only accepts integer coordinates, so convert the truncated
    // position first and re-apply the fractional remainder afterwards.
    int wholeX = (int)ScreenX;
    int wholeY = (int)ScreenY;
    System.Drawing.Point client = c.PointToClient(new System.Drawing.Point(wholeX, wholeY));
    return new System.Drawing.PointF(
        client.X + ScreenX - (float)wholeX,
        client.Y + ScreenY - (float)wholeY);
}
/// <summary>
/// Renders the given arrow line string onto the graphics surface.
/// </summary>
/// <param name="map">The map used to convert world coordinates to image coordinates.</param>
/// <param name="graphics">The graphics object to draw on.</param>
/// <param name="arrow">The arrow geometry.</param>
private void RenderArrow(SharpMap.Map map, System.Drawing.Graphics graphics, GeoAPI.Geometries.ILineString arrow)
{
    // Convert every vertex of the arrow from world space to image space.
    var imagePoints = new System.Drawing.PointF[arrow.Coordinates.Length];
    for (var index = 0; index < imagePoints.Length; index++)
    {
        imagePoints[index] = map.WorldToImage(arrow.GetCoordinateN(index));
    }
    graphics.DrawLines(ArrowPen, imagePoints);
}
/// <summary>
/// Creates a point, mirroring the coordinates into both the internal
/// Vector2 and System.Drawing.PointF representations.
/// </summary>
/// <param name="x">The x coordinate.</param>
/// <param name="y">The y coordinate.</param>
public PointF(float x, float y)
{
    InternalVector2 = new Vector2(x, y);
    InternalPointF = new System.Drawing.PointF(x, y);
    X = x;
    Y = y;
}
/// <summary>
/// A projector simulated by an orbit camera. Input (except while the C key is
/// pressed) is routed to the orbit camera; pressing P intersects the projector
/// quad corners with the z = 0 plane (the homography step is currently disabled).
/// </summary>
public VirtualProjector(string name, string uuid, Size imageSize, PointF principal, double focalLength, Screen screen, Chessboard chessboard, CaptureCamera captureCamera)
    : base(name, uuid, imageSize, principal, focalLength, screen, chessboard, captureCamera)
{
    OrbitCamera = new OrbitCamera("{0} Orbit".FormatWith(name), "N/A", imageSize, principal, focalLength, Intrinsics.NearPlaneDistance, Intrinsics.FarPlaneDistance)
    {
        Color = Color.DarkCyan.Alpha(0.7f),
    };
    Color = Color.DarkKhaki.Alpha(0.6f);

    // Forward all input to the orbit camera except while C is pressed.
    ProgramTask.AttachInputToCamera(Program.WhenInput.Where(input => !input.KeyPressed(Keys.C)), Window, OrbitCamera);

    // On P: project the four projector quad corners onto the ground plane (z = 0).
    Program.WhenInput.Where(input => input.KeyDown(Keys.P)).Subscribe(input =>
    {
        var frustum = new Frustum(OrbitCamera, Intrinsics.ImageSize);
        var plane = new Plane(Vector3.UnitZ, 0);
        var p0 = frustum.IntersectWithPlane(ProjectorQuadCorners[0].X, ProjectorQuadCorners[0].Y, plane).ToPointF();
        var p1 = frustum.IntersectWithPlane(ProjectorQuadCorners[1].X, ProjectorQuadCorners[1].Y, plane).ToPointF();
        var p2 = frustum.IntersectWithPlane(ProjectorQuadCorners[2].X, ProjectorQuadCorners[2].Y, plane).ToPointF();
        var p3 = frustum.IntersectWithPlane(ProjectorQuadCorners[3].X, ProjectorQuadCorners[3].Y, plane).ToPointF();
        var quadCorners = new[] { p0, p1, p2, p3, };
        // Chessboard.HomographTo(quadCorners);
    });
}
/// <summary>
/// Builds a lookup table mapping depth-frame pixels to undistorted camera-space
/// ray coordinates, sampled at the requested table resolution.
/// </summary>
/// <param name="tableWidth">Width of the table (defaults to the depth image width).</param>
/// <param name="tableHeight">Height of the table (defaults to the depth image height).</param>
/// <returns>Row-major table of tableWidth * tableHeight undistorted points.</returns>
public System.Drawing.PointF[] ComputeDepthFrameToCameraSpaceTable(int tableWidth = depthImageWidth, int tableHeight = depthImageHeight)
{
    // Depth camera intrinsics: focal lengths, principal point, radial distortion.
    float fx = (float)depthCameraMatrix[0, 0];
    float fy = (float)depthCameraMatrix[1, 1];
    float cx = (float)depthCameraMatrix[0, 2];
    float cy = (float)depthCameraMatrix[1, 2];
    float[] kappa = new float[] { (float)depthLensDistortion[0], (float)depthLensDistortion[1] };

    var table = new System.Drawing.PointF[tableWidth * tableHeight];
    for (int y = 0; y < tableHeight; y++)
        for (int x = 0; x < tableWidth; x++)
        {
            double xout, yout;
            double framex = (double)x / (double)tableWidth * depthImageWidth; // in depth camera image coordinates
            double framey = (double)y / (double)tableHeight * depthImageHeight;
            // Y is flipped before undistortion — presumably to match the flipped
            // (right-handed) convention used by the calibration; confirm against callers.
            CameraMath.Undistort(fx, fy, cx, cy, kappa, framex, (depthImageHeight - framey), out xout, out yout);
            var point = new System.Drawing.PointF();
            point.X = (float)xout;
            point.Y = (float)yout;
            table[tableWidth * y + x] = point;
        }
    return table;
}
/// <summary>
/// Converts a Cocoa point (origin bottom-left) to desktop coordinates (origin top-left).
/// </summary>
public static Point ToDesktopPoint(CGPoint loc)
{
    var converted = new Point(loc.X, desktopBounds.Height - loc.Y);
    // Shift by the desktop origin when the bounds begin at a negative Y.
    if (desktopBounds.Y < 0)
    {
        converted.Y += desktopBounds.Y;
    }
    return converted;
}
/// <summary>
/// Fills the ellipse defined by Center, RadiusX and RadiusY using this shape's Color.
/// </summary>
/// <param name="g">The graphics surface to draw on.</param>
public override void Draw(System.Drawing.Graphics g)
{
    // using guarantees the brush is released even if FillEllipse throws
    // (the original leaked the brush on exception).
    using (var solidBrush = new System.Drawing.SolidBrush(Color))
    {
        System.Drawing.PointF upperLeftPoint = new System.Drawing.PointF((float)(Center.X - RadiusX), (float)(Center.Y - RadiusY));
        System.Drawing.SizeF rectSize = new System.Drawing.SizeF((float)(2 * RadiusX), (float)(2 * RadiusY));
        System.Drawing.RectangleF rect = new System.Drawing.RectangleF(upperLeftPoint, rectSize);
        g.FillEllipse(solidBrush, rect);
    }
}
/// <summary>
/// Recomputes per-feature translation increments on the ground plane, removing the
/// effect of the given heading change from each tracked feature's motion.
/// </summary>
/// <param name="headingChange">Heading rotation since the previous frame.</param>
private void PopulateRotationCorrectedTranslationIncrements(Angle headingChange)
{
    double s = Math.Sin(headingChange.Rads);
    double c = Math.Cos(headingChange.Rads);

    m_TranslationIncrements.Clear();
    System.Drawing.PointF[] featurePointPair = new System.Drawing.PointF[2];
    List<TrackedFeature> trackedFeatures = m_VisualOdometer.TrackedFeatures;
    m_GroundFeatures.Clear();
    for (int i = 0; i < trackedFeatures.Count; i++)
    {
        TrackedFeature trackedFeature = trackedFeatures[i];
        // Need at least two observations (previous and current) of the feature.
        if (trackedFeature.Count < 2)
        {
            continue;
        }

        // previous and current feature points need to be in the ground region
        if (!(trackedFeature[-1].Y > m_VisualOdometer.GroundRegionTop && trackedFeature[0].Y > m_VisualOdometer.GroundRegionTop))
        {
            continue;
        }

        featurePointPair[0] = trackedFeature[-1]; // previous feature location
        featurePointPair[1] = trackedFeature[0];  // current feature location

        // Project both image points onto the floor plane (modifies the pair in place).
        ProjectOnFloor(featurePointPair);

        // Remove rotation effect on current feature location. The center of the
        // rotation is the previous feature location.
        Point rotationCorrectedEndPoint = new Point(
            c * featurePointPair[1].X - s * featurePointPair[1].Y,
            s * featurePointPair[1].X + c * featurePointPair[1].Y);

        // What remains after removing the rotation is attributed to translation.
        Point translationIncrement = new Point(
            featurePointPair[0].X - rotationCorrectedEndPoint.X,
            featurePointPair[0].Y - rotationCorrectedEndPoint.Y);

        m_TranslationIncrements.Add(translationIncrement);
        m_GroundFeatures.Add(trackedFeature);
    }
}
/// <summary>
/// Fills the rectangle centered at Center with the given Width and Height,
/// using this shape's Color.
/// </summary>
/// <param name="g">The graphics surface to draw on.</param>
public override void Draw(System.Drawing.Graphics g)
{
    // using guarantees the brush is released even if FillRectangle throws
    // (the original leaked the brush on exception).
    using (var solidBrush = new System.Drawing.SolidBrush(Color))
    {
        System.Drawing.PointF upperLeftPoint = new System.Drawing.PointF((float)(Center.X - Width / 2), (float)(Center.Y - Height / 2));
        System.Drawing.SizeF rectSize = new System.Drawing.SizeF((float)Width, (float)Height);
        System.Drawing.RectangleF rect = new System.Drawing.RectangleF(upperLeftPoint, rectSize);
        g.FillRectangle(solidBrush, rect);
    }
}
/// <summary>
/// Fills the triangle defined by Vertex1..Vertex3 using this shape's Color.
/// </summary>
/// <param name="g">The graphics surface to draw on.</param>
public override void Draw(System.Drawing.Graphics g)
{
    // using guarantees the brush is released even if FillPolygon throws
    // (the original leaked the brush on exception).
    using (var solidBrush = new System.Drawing.SolidBrush(Color))
    {
        System.Drawing.PointF[] points = new System.Drawing.PointF[3];
        points[0] = new System.Drawing.PointF((float)Vertex1.X, (float)Vertex1.Y);
        points[1] = new System.Drawing.PointF((float)Vertex2.X, (float)Vertex2.Y);
        points[2] = new System.Drawing.PointF((float)Vertex3.X, (float)Vertex3.Y);
        g.FillPolygon(solidBrush, points);
    }
}
/// <summary>
/// Projects an array of 3D data points onto the XY plane as System.Drawing points
/// (the Z component is dropped).
/// </summary>
/// <param name="data">The source points.</param>
/// <returns>A new array of 2D points.</returns>
public static System.Drawing.PointF[] ToPointFArray(DataP3[] data)
{
    var converted = new System.Drawing.PointF[data.Length];
    for (var index = 0; index < converted.Length; index++)
    {
        converted[index] = new System.Drawing.PointF((float)data[index].X, (float)data[index].Y);
    }
    return converted;
}
/// <summary>
/// Initializes a new LabelStyle with default rendering settings:
/// 12pt Times New Roman, black, centered, with collision detection disabled.
/// </summary>
public LabelStyle()
{
    _Font = new System.Drawing.Font("Times New Roman", 12f);
    _Offset = new System.Drawing.PointF(0, 0);
    _CollisionDetection = false;
    _CollisionBuffer = new System.Drawing.Size(0, 0);
    _ForeColor = System.Drawing.Color.Black;
    // Field name keeps the legacy "Horisontal" spelling used elsewhere in the class.
    _HorisontalAlignment = HorizontalAlignmentEnum.Center;
    _VerticalAlignment = VerticalAlignmentEnum.Middle;
}
/// <summary>
/// Placeholder test for SVGUtil.DashedLine with default (zero) start/end points.
/// NOTE(review): the trailing Assert.Fail marks this as an auto-generated stub
/// that still needs a real expected value — expectedString is intentionally null.
/// </summary>
public void TestDashedLineStartEnd()
{
    System.Drawing.PointF start = new System.Drawing.PointF();
    System.Drawing.PointF end = new System.Drawing.PointF();
    string expectedString = null;
    string resultString = null;
    resultString = SVGUtil.DashedLine(start, end);
    Assert.AreEqual(expectedString, resultString, "DashedLine method returned unexpected result.");
    Assert.Fail("Create or modify test(s).");
}
/// <summary>
/// Lays out the layer by rebuilding its arc path, centered on the frame.
/// </summary>
public override void LayoutSublayers()
{
    base.LayoutSublayers();
    var arcRadius = (CircleSize / 2f) * 1.2f;
    var arcCenter = new CGPoint(Frame.Width / 2f, Frame.Y + Frame.Height / 2f);
    // Start at the top of the circle and sweep slightly past a full revolution.
    var from = 0 - Helpers.fPI2;
    var to = (Helpers.fPI * 2 - Helpers.fPI2) + Helpers.fPI / 8;
    Path = UIBezierPath.FromArc(arcCenter, arcRadius, from, to, true).CGPath;
}
/// <summary>
/// Creates a projector backed by a real full-screen window on the given screen,
/// rendering a plain white clear pass through a screen-aligned camera.
/// </summary>
public RealProjector(string name, string uuid, Size imageSize, PointF principal, double focalLength, Screen screen)
    : base(name, uuid, imageSize, principal, focalLength)
{
    Window = new Window("Pentacorn Projector")
    {
        LocatedOnScreen = screen,
        FullScreen = true,
    };
    Color = Color.DarkBlue;
    Window.Scene = new Scene(new ScreenCamera(Window))
    {
        new Clear(Color.White),
    };
}
/// <summary>
/// Plain-style invoice table; a tap on a row toggles that invoice line's selection.
/// </summary>
public InvoiceTableView() : base(MonoTouch.UIKit.UITableViewStyle.Plain)
{
    CellIdentifier = InvoiceLineCell.Key;
    SectionHeaderHeight = 30;
    RowHeight = 60;
    // Starts with the content offset 100pt above the top — presumably to hide a
    // header above the first row; confirm against the view controller.
    ContentOffset = new System.Drawing.PointF (0, -100);
    BackgroundView = null;
    BackgroundColor = UIColor.Clear;
    AutoScroll = true;
    this.ItemTapped = (i) => { (i as InvoiceLine).ToggleSelected(); };
}
/// <summary>
/// Initializes the shared fixtures used by the tests: matching point/rectangle
/// values in System.Drawing (int), System.Windows (double) and PointF (float) forms.
/// </summary>
public static void Init(TestContext context)
{
    dp1 = new System.Drawing.Point(10, 10);
    dp2 = new System.Drawing.Point(100, 100);
    drect = new System.Drawing.Rectangle(5, 5, 15, 15);
    wp1 = new System.Windows.Point(10, 10);
    wp2 = new System.Windows.Point(100, 100);
    wrect = new System.Windows.Rect(5, 5, 15, 15);
    fp1 = new System.Drawing.PointF(10, 10);
    fp2 = new System.Drawing.PointF(100, 100);
    frect = new System.Drawing.RectangleF(5, 5, 15, 15);
}
/// <summary>
/// Resets the round state and starts the timer.
/// </summary>
public override void Start()
{
    time = 20;          // round duration — presumably seconds; confirm against timer tick
    win = 10;           // threshold compared against elsewhere — confirm semantics
    startingTime = 3;   // countdown before the round begins
    state = State.Starting;
    x = 0;
    y = 0;
    recPos = new System.Drawing.PointF(50, 50);
    timer.Start();
}
/// <summary>
/// Finds the mesh triangle located at the given screen position.
/// </summary>
/// <param name="x">Screen x coordinate.</param>
/// <param name="y">Screen y coordinate.</param>
private ITriangle FindTriangleAt(float x, float y)
{
    // Translate the screen position into world (mesh) coordinates.
    var worldPoint = new System.Drawing.PointF(x, y);
    renderControl.Zoom.ScreenToWorld(ref worldPoint);

    topoControlView.SetPosition(worldPoint);

    // Lazily build the quad tree used for point-location queries.
    if (tree == null)
    {
        tree = new TriangleQuadTree(mesh, 5, 2);
    }

    return tree.Query(worldPoint.X, worldPoint.Y);
}
/// <summary>
/// Builds a 2D polygon from a shape's outline, transformed by the given matrix.
/// </summary>
/// <param name="containerName">Name of the owning container.</param>
/// <param name="sh">Shape whose outline segment start points supply the vertices.</param>
/// <param name="m">Transform applied to every vertex.</param>
public PolygonShape2D(string containerName, Shape sh, Matrix m)
{
    this.ContainerName = containerName;
    List<IShapeData> sd = sh.ShapeData;
    EnsureClockwise(sd);
    System.Drawing.PointF[] pts = new System.Drawing.PointF[sd.Count];
    for (int i = 0; i < sd.Count; i++)
    {
        pts[i] = new System.Drawing.PointF(sd[i].StartPoint.X, sd[i].StartPoint.Y);
    }
    // Dispose the GDI+ matrix once the points are transformed — it wraps a native
    // handle that the original code leaked.
    using (System.Drawing.Drawing2D.Matrix m2 = m.GetDrawing2DMatrix())
    {
        m2.TransformPoints(pts);
    }
    for (int i = 0; i < pts.Length; i++)
    {
        Points.Add(new Point(pts[i].X, pts[i].Y));
    }
}
/// <summary>
/// Square spinner layer sized to the frame's height: a thin white arc stroke,
/// initially hidden and 40% drawn.
/// </summary>
/// <param name="frame">The host frame; only its height is used for sizing.</param>
public SpinnerLayer(CGRect frame)
{
    Frame = new CGRect(0.0f, 0.0f, frame.Height, frame.Height);

    // Arc occupies half of the layer's radius, starting at the top of the circle.
    var arcRadius = (frame.Height / 2.0f) * 0.5f;
    var arcCenter = new CGPoint(frame.Height / 2.0f, Bounds.GetMidY());
    var from = -(fPI / 2);
    var to = fPI * 2.0f - (fPI / 2.0f);
    Path = UIBezierPath.FromArc(arcCenter, arcRadius, from, to, true).CGPath;

    FillColor = null;
    StrokeColor = UIColor.White.CGColor;
    LineWidth = 1.0f;
    StrokeEnd = 0.4f;
    Hidden = true;
}
/// <summary>
/// Tests whether the given point, mapped through the inverse of this object's
/// first transform, falls inside any of the definition's shapes.
/// </summary>
/// <param name="p">The point to test, in parent coordinates.</param>
/// <returns>True when the point lies inside at least one shape.</returns>
public virtual bool IsPointInside(Point p)
{
    System.Drawing.PointF[] p2 = new System.Drawing.PointF[] { new System.Drawing.PointF(p.X, p.Y) };
    // Map the point into local coordinates by inverting the first transform.
    // Dispose the GDI+ matrix afterwards — it wraps a native handle that the
    // original code leaked.
    using (System.Drawing.Drawing2D.Matrix m = this.Transforms[0].Matrix.GetDrawing2DMatrix())
    {
        m.Invert();
        m.TransformPoints(p2);
    }
    Point tp = new Point(p2[0].X, p2[0].Y);
    for (int i = 0; i < this.Definition.Shapes.Count; i++)
    {
        if (this.Definition.Shapes[i].IsPointInside(tp))
        {
            return true;
        }
    }
    return false;
}
/// <summary>
/// Calculates optical flow for a sparse feature set using the iterative
/// Lucas-Kanade method in pyramids.
/// </summary>
/// <param name="prev">First frame, at time t</param>
/// <param name="curr">Second frame, at time t + dt</param>
/// <param name="prevPyrBuffer">Buffer for the pyramid for the first frame. If it is not NULL, the buffer must have a sufficient size to store the pyramid from level 1 to level #level; the total size of (image_width+8)*image_height/3 bytes is sufficient</param>
/// <param name="currPyrBuffer">Similar to prev_pyr, used for the second frame</param>
/// <param name="prevFeatures">Array of points for which the flow needs to be found</param>
/// <param name="winSize">Size of the search window of each pyramid level</param>
/// <param name="level">Maximal pyramid level number. If 0, pyramids are not used (single level), if 1, two levels are used, etc</param>
/// <param name="criteria">Specifies when the iteration process of finding the flow for each point on each pyramid level should be stopped</param>
/// <param name="flags">Flags</param>
/// <param name="currFeatures">Array of 2D points containing calculated new positions of input features in the second image</param>
/// <param name="status">Array. Every element of the array is set to 1 if the flow for the corresponding feature has been found, 0 otherwise</param>
/// <param name="trackError">Array of double numbers containing difference between patches around the original and moved points</param>
public static void PyrLK(
    Image<Gray, Byte> prev,
    Image<Gray, Byte> curr,
    Image<Gray, Byte> prevPyrBuffer,
    Image<Gray, Byte> currPyrBuffer,
    System.Drawing.PointF[] prevFeatures,
    System.Drawing.Size winSize,
    int level,
    MCvTermCriteria criteria,
    Emgu.CV.CvEnum.LKFLOW_TYPE flags,
    out System.Drawing.PointF[] currFeatures,
    out Byte[] status,
    out float[] trackError)
{
    // Allocate pyramid scratch buffers if the caller did not supply them.
    // The (width+8) x (height/3) image provides the byte budget OpenCV requires.
    if (prevPyrBuffer == null)
    {
        prevPyrBuffer = new Image<Gray, byte>(prev.Width + 8, prev.Height / 3);
    }
    if (currPyrBuffer == null)
    {
        currPyrBuffer = prevPyrBuffer.CopyBlank();
    }

    status = new Byte[prevFeatures.Length];
    trackError = new float[prevFeatures.Length];
    currFeatures = new System.Drawing.PointF[prevFeatures.Length];

    CvInvoke.cvCalcOpticalFlowPyrLK(
        prev,
        curr,
        prevPyrBuffer,
        currPyrBuffer,
        prevFeatures,
        currFeatures,
        prevFeatures.Length,
        winSize,
        level,
        status,
        trackError,
        criteria,
        flags);
}
/// <summary>
/// Feeds the next polyline point into the stroke tessellator: draws the segment
/// from the previous point, the join with the previous segment, and the start cap
/// for open strokes, while advancing the dash phase.
/// </summary>
/// <param name="p">The next point of the stroked path.</param>
public void addPoint(PointF p)
{
    count++;
    if (count == 1)
    {
        // First point: nothing to draw yet, just remember it.
        firstp = p;
        prevp = p;
    }
    else
    {
        float m;
        float length;
        // Segment vector from the previous point to p.
        PointF n = new PointF(p.X - prevp.X, p.Y - prevp.Y);
        length = (float)Math.Sqrt(n.X * n.X + n.Y * n.Y);
        if (length == 0.0)
            return; // Skip zero-length segments (NOTE(review): count stays incremented).
        m = width / length;
        // d: segment normal scaled by the stroke width.
        PointF d = new PointF(-n.Y * m, n.X * m);
        rendererCallback.SetDefaultTex2(dashPhase, 0.5f);
        if (count > 2)
            drawJoin(prevp, prevd, d);
        drawLine(prevp, p, d, length / context.lineWidth / dashLength);
        // Advance the dash phase by the segment length in dash units; keep only
        // the fractional part so the phase stays in [0, 1).
        dashPhase += length / context.lineWidth / dashLength;
        dashPhase -= (float)Math.Floor(dashPhase);
        rendererCallback.SetDefaultTex2(dashPhase, 0.5f);
        if (count == 2 && !isClosed)
        {
            // Open stroke: cap the very first point, facing backwards.
            PointF d2 = new PointF(-d.X, -d.Y);
            drawCap(prevp, d2);
        }
        prevp = p;
        prevd = d;
        if (count == 2)
            firstd = d;
    }
}
/// <summary>
/// Appends a straight line to <paramref name="point"/>, applying
/// <paramref name="transform"/> to the point before it is added to the path.
/// </summary>
public void AddLineToPoint(CGAffineTransform transform, CGPoint point)
{
    CGPathAddLineToPoint(handle, ref transform, point.X, point.Y);
}
/// <summary>
/// Appends a cubic Bézier curve to <paramref name="point"/> with control points
/// <paramref name="cp1"/> and <paramref name="cp2"/>, transformed by
/// <paramref name="transform"/>.
/// </summary>
public void AddCurveToPoint(CGAffineTransform transform, CGPoint cp1, CGPoint cp2, CGPoint point)
{
    CGPathAddCurveToPoint(handle, ref transform, cp1.X, cp1.Y, cp2.X, cp2.Y, point.X, point.Y);
}
/// <summary>
/// Reference-assembly placeholder (`throw null`); the real implementation lives
/// in the runtime assembly and this stub is never meant to execute.
/// </summary>
public bool Contains(System.Drawing.PointF pt) { throw null; }
/// <summary>
/// Appends a straight line to <paramref name="point"/> without applying a transform.
/// </summary>
public void AddLineToPoint(CGPoint point)
{
    CGPathAddLineToPoint(handle, IntPtr.Zero, point.X, point.Y);
}
/// <summary>
/// Returns whether the path contains <paramref name="point"/>, using the even-odd
/// fill rule when <paramref name="eoFill"/> is true; no transform is applied.
/// </summary>
public bool ContainsPoint(CGPoint point, bool eoFill)
{
    return(CGPathContainsPoint(handle, IntPtr.Zero, point, eoFill));
}
/// <summary>
/// Converts a <see cref="System.Drawing.PointF"/> to a native <see cref="CGPoint"/>.
/// </summary>
public static CGPoint ToNS(this System.Drawing.PointF point) => new CGPoint(point.X, point.Y);
/// <summary>
/// Derives approximate color/depth camera calibrations by sampling the Kinect SDK's
/// own CoordinateMapper over a grid of synthetic 3D points, fitting camera matrices
/// and lens distortion to those correspondences, then printing reprojection errors.
/// </summary>
/// <param name="kinectSensor">Sensor whose coordinate mapper provides the projections.</param>
internal void RecoverCalibrationFromSensor(Microsoft.Kinect.KinectSensor kinectSensor)
{
    var stopWatch = new System.Diagnostics.Stopwatch();
    stopWatch.Start();

    var objectPoints1 = new List<Vector<double>>();
    var colorPoints1 = new List<System.Drawing.PointF>();
    var depthPoints1 = new List<System.Drawing.PointF>();

    // Sample a 3D grid of camera-space points across the sensor's working volume.
    int n = 0;
    for (float x = -2f; x < 2f; x += 0.2f)
    {
        for (float y = -2f; y < 2f; y += 0.2f)
        {
            for (float z = 0.4f; z < 4.5f; z += 0.4f)
            {
                var kinectCameraPoint = new CameraSpacePoint();
                kinectCameraPoint.X = x;
                kinectCameraPoint.Y = y;
                kinectCameraPoint.Z = z;

                // use SDK's projection
                // adjust Y to make RH coordinate system that is a projection of Kinect 3D points
                var kinectColorPoint = kinectSensor.CoordinateMapper.MapCameraPointToColorSpace(kinectCameraPoint);
                kinectColorPoint.Y = colorImageHeight - kinectColorPoint.Y;
                var kinectDepthPoint = kinectSensor.CoordinateMapper.MapCameraPointToDepthSpace(kinectCameraPoint);
                kinectDepthPoint.Y = depthImageHeight - kinectDepthPoint.Y;

                // Keep only points that project inside both image frames.
                if ((kinectColorPoint.X >= 0) && (kinectColorPoint.X < colorImageWidth) &&
                    (kinectColorPoint.Y >= 0) && (kinectColorPoint.Y < colorImageHeight) &&
                    (kinectDepthPoint.X >= 0) && (kinectDepthPoint.X < depthImageWidth) &&
                    (kinectDepthPoint.Y >= 0) && (kinectDepthPoint.Y < depthImageHeight))
                {
                    n++;
                    var objectPoint = Vector<double>.Build.Dense(3);
                    objectPoint[0] = kinectCameraPoint.X;
                    objectPoint[1] = kinectCameraPoint.Y;
                    objectPoint[2] = kinectCameraPoint.Z;
                    objectPoints1.Add(objectPoint);

                    var colorPoint = new System.Drawing.PointF();
                    colorPoint.X = kinectColorPoint.X;
                    colorPoint.Y = kinectColorPoint.Y;
                    colorPoints1.Add(colorPoint);

                    var depthPoint = new System.Drawing.PointF();
                    depthPoint.X = kinectDepthPoint.X;
                    depthPoint.Y = kinectDepthPoint.Y;
                    depthPoints1.Add(depthPoint);
                }
            }
        }
    }

    // Seed the color intrinsics with a rough initial guess before optimization.
    this.colorCameraMatrix[0, 0] = 1000; //fx
    this.colorCameraMatrix[1, 1] = 1000; //fy
    this.colorCameraMatrix[0, 2] = colorImageWidth / 2; //cx
    this.colorCameraMatrix[1, 2] = colorImageHeight / 2; //cy
    this.colorCameraMatrix[2, 2] = 1;

    var rotation = Vector<double>.Build.Dense(3);
    var translation = Vector<double>.Build.Dense(3);
    var colorError = CalibrateColorCamera(objectPoints1, colorPoints1, colorCameraMatrix, colorLensDistortion, rotation, translation, this.silent);
    // Pack the recovered axis-angle rotation and translation into a 4x4
    // depth-to-color rigid transform.
    var rotationMatrix = RotationExtensions.AxisAngleToMatrix(rotation);
    this.depthToColorTransform = Matrix<double>.Build.DenseIdentity(4, 4);
    for (int i = 0; i < 3; i++)
    {
        this.depthToColorTransform[i, 3] = translation[i];
        for (int j = 0; j < 3; j++)
        {
            this.depthToColorTransform[i, j] = rotationMatrix[i, j];
        }
    }

    // Seed the depth intrinsics with a rough initial guess before optimization.
    this.depthCameraMatrix[0, 0] = 360; //fx
    this.depthCameraMatrix[1, 1] = 360; //fy
    this.depthCameraMatrix[0, 2] = depthImageWidth / 2.0; //cx
    this.depthCameraMatrix[1, 2] = depthImageHeight / 2.0; //cy
    this.depthCameraMatrix[2, 2] = 1;

    var depthError = CalibrateDepthCamera(objectPoints1, depthPoints1, depthCameraMatrix, depthLensDistortion, silent);

    // check projections
    double depthProjectionError = 0;
    double colorProjectionError = 0;
    var testObjectPoint4 = Vector<double>.Build.Dense(4);
    for (int i = 0; i < n; i++)
    {
        var testObjectPoint = objectPoints1[i];
        var testDepthPoint = depthPoints1[i];
        var testColorPoint = colorPoints1[i];

        // "camera space" == depth camera space
        // depth camera projection
        double depthU, depthV;
        Project(depthCameraMatrix, depthLensDistortion, testObjectPoint[0], testObjectPoint[1], testObjectPoint[2], out depthU, out depthV);

        double dx = testDepthPoint.X - depthU;
        double dy = testDepthPoint.Y - depthV;
        depthProjectionError += (dx * dx) + (dy * dy);

        // color camera projection
        testObjectPoint4[0] = testObjectPoint[0];
        testObjectPoint4[1] = testObjectPoint[1];
        testObjectPoint4[2] = testObjectPoint[2];
        testObjectPoint4[3] = 1;

        var color = depthToColorTransform * testObjectPoint4;
        color *= (1.0 / color[3]); // not necessary for this transform

        double colorU, colorV;
        Project(colorCameraMatrix, colorLensDistortion, color[0], color[1], color[2], out colorU, out colorV);

        dx = testColorPoint.X - colorU;
        dy = testColorPoint.Y - colorV;
        colorProjectionError += (dx * dx) + (dy * dy);
    }
    depthProjectionError /= n;
    colorProjectionError /= n;

    stopWatch.Stop();
    if (!this.silent)
    {
        Console.WriteLine("FakeCalibration :");
        Console.WriteLine("n = " + n);
        Console.WriteLine("color error = " + colorError);
        Console.WriteLine("depth error = " + depthError);
        Console.WriteLine("depth reprojection error = " + depthProjectionError);
        Console.WriteLine("color reprojection error = " + colorProjectionError);
        Console.WriteLine("depth camera matrix = \n" + depthCameraMatrix);
        Console.WriteLine("depth lens distortion = \n" + depthLensDistortion);
        Console.WriteLine("color camera matrix = \n" + colorCameraMatrix);
        Console.WriteLine("color lens distortion = \n" + colorLensDistortion);
        Console.WriteLine(stopWatch.ElapsedMilliseconds + " ms");
        Console.WriteLine("________________________________________________________");
    }
}
/// <summary>
/// Returns whether the path contains <paramref name="point"/> after applying the
/// transform <paramref name="m"/>; <paramref name="eoFill"/> selects the even-odd rule.
/// </summary>
public bool ContainsPoint(CGAffineTransform m, CGPoint point, bool eoFill)
{
    return(CGPathContainsPoint(handle, ref m, point, eoFill));
}
/// <summary>
/// Reference-assembly placeholder (`throw null`); the real implementation lives
/// in the runtime assembly and this stub is never meant to execute.
/// </summary>
public static System.Drawing.Point Round(System.Drawing.PointF value) { throw null; }
/// <summary>
/// Draws a complete EAN-13 barcode (bars plus human-readable digits) at the given
/// position. All measurements are millimeters; an EAN-13 symbol is 113 modules wide.
/// NOTE(review): the brush and font are never disposed — consider using blocks.
/// </summary>
/// <param name="g">The graphics surface to draw on.</param>
/// <param name="pt">Top-left position of the barcode, in millimeters.</param>
public void DrawEan13Barcode(System.Drawing.Graphics g, System.Drawing.PointF pt)
{
    float width = this.Width * this.Scale;
    float height = this.Height * this.Scale;

    // EAN13 Barcode should be a total of 113 modules wide.
    float lineWidth = width / 113f;

    // Save the GraphicsState so unit/scale changes can be undone at the end.
    System.Drawing.Drawing2D.GraphicsState gs = g.Save();

    // Set the PageUnit to Millimeter because all of our measurements are metric.
    g.PageUnit = System.Drawing.GraphicsUnit.Millimeter;
    // Set the PageScale to 1, so a millimeter will represent a true millimeter.
    g.PageScale = 1;

    System.Drawing.SolidBrush brush = new System.Drawing.SolidBrush(System.Drawing.Color.Black);

    float xPosition = pt.X;

    System.Text.StringBuilder strbEan13 = new System.Text.StringBuilder();
    System.Text.StringBuilder sbTemp = new System.Text.StringBuilder();

    float xStart = pt.X;
    float yStart = pt.Y;
    float xEnd = 0;

    System.Drawing.Font font = new System.Drawing.Font("Arial", this._fFontSize * this.Scale);

    // Calculate the Check Digit.
    this.CalculateChecksumDigit();

    sbTemp.AppendFormat("{0}{1}{2}{3}", this.CountryCode, this.ManufacturerCode, this.ProductCode, this.ChecksumDigit);
    string sTemp = sbTemp.ToString();

    string sLeftPattern = "";

    // Convert the left hand numbers.
    sLeftPattern = ConvertLeftPattern(sTemp.Substring(0, 7));

    // Build the bar pattern: quiet zone, guard, left half, separator, right half, guard, quiet zone.
    strbEan13.AppendFormat("{0}{1}{2}{3}{4}{1}{0}", this._sQuiteZone, this._sLeadTail, sLeftPattern, this._sSeparator, ConvertToDigitPatterns(sTemp.Substring(7), this._aRight));
    string sTempUpc = strbEan13.ToString();

    float fTextHeight = g.MeasureString(sTempUpc, font).Height;

    // Draw the barcode lines: a "1" in the pattern means one dark module.
    for (int i = 0; i < strbEan13.Length; i++)
    {
        if (sTempUpc.Substring(i, 1) == "1")
        {
            if (xStart == pt.X)
            {
                xStart = xPosition;
            }
            // Save room for the UPC number below the bar code.
            if ((i > 12 && i < 55) || (i > 57 && i < 101))
            {
                // Draw space for the number
                g.FillRectangle(brush, xPosition, yStart, lineWidth, height - fTextHeight);
            }
            else
            {
                // Draw a full line.
                g.FillRectangle(brush, xPosition, yStart, lineWidth, height);
            }
        }
        xPosition += lineWidth;
        xEnd = xPosition;
    }

    // Draw the upc numbers below the line.
    xPosition = xStart - g.MeasureString(this.CountryCode.Substring(0, 1), font).Width;
    float yPosition = yStart + (height - fTextHeight);

    // Draw 1st digit of the country code.
    g.DrawString(sTemp.Substring(0, 1), font, brush, new System.Drawing.PointF(xPosition, yPosition));
    xPosition += (g.MeasureString(sTemp.Substring(0, 1), font).Width + 43 * lineWidth) - (g.MeasureString(sTemp.Substring(1, 6), font).Width);

    // Draw MFG Number.
    g.DrawString(sTemp.Substring(1, 6), font, brush, new System.Drawing.PointF(xPosition, yPosition));
    xPosition += g.MeasureString(sTemp.Substring(1, 6), font).Width + (11 * lineWidth);

    // Draw Product ID.
    g.DrawString(sTemp.Substring(7, 5), font, brush, new System.Drawing.PointF(xPosition, yPosition));
    xPosition += g.MeasureString(sTemp.Substring(7, 5), font).Width + lineWidth;

    // Draw the last digit (checksum) in dark red.
    brush = new System.Drawing.SolidBrush(System.Drawing.Color.DarkRed);
    g.DrawString(sTemp.Substring(12), font, brush, new System.Drawing.PointF(xPosition, yPosition));

    // Restore the GraphicsState.
    g.Restore(gs);
}
/// <summary>
/// Reference-assembly placeholder (`throw null`); the real implementation lives
/// in the runtime assembly and this stub is never meant to execute.
/// </summary>
public static System.Drawing.Point Ceiling(System.Drawing.PointF value) { throw null; }
/// <summary>
/// Reference-assembly placeholder (`throw null`); the real implementation lives
/// in the runtime assembly and this stub is never meant to execute.
/// </summary>
public SizeF(System.Drawing.PointF pt) { throw null; }
/// <summary>
/// Converts a <see cref="System.Drawing.PointF"/> to an <see cref="XPoint"/>.
/// </summary>
internal static XPoint Point2XPoint(System.Drawing.PointF point) => new XPoint(point.X, point.Y);
/// <summary>
/// Appends a cubic Bézier curve to <paramref name="point"/> with control points
/// <paramref name="cp1"/> and <paramref name="cp2"/>; no transform is applied.
/// </summary>
public void AddCurveToPoint(CGPoint cp1, CGPoint cp2, CGPoint point)
{
    CGPathAddCurveToPoint(handle, IntPtr.Zero, cp1.X, cp1.Y, cp2.X, cp2.Y, point.X, point.Y);
}
/// <summary>
/// Tests whether <paramref name="point"/> hits this object within the given
/// tolerance. Semantics of <paramref name="precisionDelta"/> are defined by
/// each implementation.
/// </summary>
public abstract bool HitTest(System.Drawing.PointF point, float precisionDelta);
// Native CoreGraphics CGPathContainsPoint, overload taking an affine transform by ref.
extern static bool CGPathContainsPoint(IntPtr path, ref CGAffineTransform m, CGPoint point, bool eoFill);
/// <summary>
/// Convert a <see cref="System.Drawing.PointF"/> to the android native <see cref="Point"/>.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A <see cref="Point"/> of the value.</returns>
public static Point ToNative(this System.Drawing.PointF value) => new Point(value.X, value.Y);
// Native CoreGraphics CGPathContainsPoint, overload used when no transform is supplied
// (IntPtr.Zero stands in for a NULL CGAffineTransform pointer).
extern static bool CGPathContainsPoint(IntPtr path, IntPtr zero, CGPoint point, bool eoFill);
/// <summary>
/// Returns the geometric (Euclidean) distance between the two given points.
/// </summary>
/// <param name="p1">The first point.</param>
/// <param name="p2">The second point.</param>
/// <returns>The distance between p1 and p2.</returns>
public static float DistanceBetweenPoints(System.Drawing.PointF p1, System.Drawing.PointF p2)
{
    // Square via multiplication instead of Math.Pow: cheaper and exact.
    double dx = p1.X - p2.X;
    double dy = p1.Y - p2.Y;
    return (float)Math.Sqrt(dx * dx + dy * dy);
}
/// <summary>
/// Creates a path element of the given native element type with all three
/// points initialized to the origin.
/// </summary>
public CGPathElement(int t)
{
    Type = (CGPathElementType)t;
    Point1 = Point2 = Point3 = new CGPoint(0, 0);
}
/// <summary>
/// Converts a <see cref="System.Drawing.PointF"/> to an <see cref="SKPoint"/>.
/// </summary>
public static SKPoint ToSKPoint(this System.Drawing.PointF point) => new SKPoint(point.X, point.Y);
/// <summary>
/// Converts a <see cref="System.Drawing.PointF"/> to the PixelFarm point type.
/// </summary>
public static PixelFarm.Drawing.PointF ToPointF(this System.Drawing.PointF pointf) => new PixelFarm.Drawing.PointF(pointf.X, pointf.Y);
/// <summary>
/// Reference-assembly placeholder (`throw null`); the real implementation lives
/// in the runtime assembly and this stub is never meant to execute.
/// </summary>
public RectangleF(System.Drawing.PointF location, System.Drawing.SizeF size) { throw null; }
/// <summary>
/// Moves the control point at <paramref name="index"/> to the new position and
/// raises the Changed event.
/// </summary>
public void DragPoint(int index, System.Drawing.PointF newPosition)
{
    _jointControlPointsProvider.DragPoint(index, newPosition);
    OnChanged(System.EventArgs.Empty);
}
/// <summary>
/// Builds six identical integer arrays (random or descending), creates the on-screen
/// element columns for each, runs a different sorting algorithm per column while
/// collecting swap logs, then starts the background worker that animates those logs.
/// </summary>
/// <param name="case">0 = random values; any other value = descending sequence.</param>
private void CreateAndReorder(int @case = 0)
{
    elements.Clear();
    var rand = new Random();
    var a = new List <int[]>();
    var count = 25;
    a.Add(new int[count]);
    a.Add(new int[count]);
    a.Add(new int[count]);
    a.Add(new int[count]);
    a.Add(new int[count]);
    a.Add(new int[count]);

    // Generate values once and cache them so every array receives the same data.
    var cash = new List <int>();
    var initCash = false;
    foreach (var arr in a)
    {
        elements.Add(new ArrayElements());
        var length = a.First().Length;
        for (var i = 0; i < length; i++)
        {
            var value = @case == 0 ? rand.Next(1, 100) : length - i;
            arr[i] = initCash ? cash[i] : value;
            if (!initCash)
            {
                cash.Add(value);
            }
        }
        initCash = true;
    }

    // Add the visual elements: one column per array, laid out under the header row.
    var n = 0;
    var headColumnWidth = tlpHeader.Bounds.Width / tlpHeader.ColumnCount - 2;
    var x = tlpHeader.Bounds.Left + headColumnWidth / 2 - 2;
    var y = tlpHeader.Bounds.Bottom + 5;
    foreach (var arr in a)
    {
        var location = new System.Drawing.PointF(x, y);
        var length = a.First().Length;
        for (var i = 0; i < length; i++)
        {
            var element = CreateArrayElement();
            element.Location = location;
            element.Value = arr[i];
            elements[n].Add(element);
            location.Y += element.Size.Height + 5;
        }
        n++;
        x += headColumnWidth;
    }

    // Sort each column with its own algorithm, collecting the swap logs.
    var logs = new List <Tuple <int, int> > [a.Count];
    for (var i = 0; i < a.Count; i++)
    {
        switch (i)
        {
            case 0: logs[i] = MethodsHolder.BubbleSort(a[i]); break;
            case 1: logs[i] = MethodsHolder.ShakerSort(a[i]); break;
            case 2: logs[i] = MethodsHolder.InsertionSort(a[i]); break;
            case 3: logs[i] = MethodsHolder.QuickSort(a[i]); break;
            case 4: logs[i] = MethodsHolder.CombSort(a[i]); break;
            case 5: logs[i] = MethodsHolder.SelectionSort(a[i]); break;
        }
    }

    // Start the background worker that replays the logs against the model.
    pkgPainter.RunWorkerAsync(logs);
}
/// <summary>
/// When snap points are configured, overrides the proposed scroll target offset
/// with the calculated snap point.
/// </summary>
public override void WillEndDragging(UIScrollView scrollView, CGPoint velocity, ref CGPoint targetContentOffset)
{
    var snapPoint = Owner?.NativeLayout?.GetSnapTo(velocity, targetContentOffset);
    if (snapPoint != null)
    {
        targetContentOffset = snapPoint.Value;
    }
}
/// <summary>
/// Converts a <see cref="System.Windows.Point"/> (double precision) to a
/// <see cref="System.Drawing.PointF"/> (single precision, narrowing cast).
/// </summary>
public static System.Drawing.PointF AsDrawingPointF(this System.Windows.Point point)
{
    return new System.Drawing.PointF((float)point.X, (float)point.Y);
}
/// <summary>
/// Reference-assembly placeholder with an intentionally empty body; the real
/// implementation lives in the runtime assembly.
/// </summary>
public void Offset(System.Drawing.PointF pos) { }
/// <summary>
/// Starts a new subpath at <paramref name="point"/>; no transform is applied.
/// </summary>
public void MoveToPoint(CGPoint point)
{
    CGPathMoveToPoint(handle, IntPtr.Zero, point.X, point.Y);
}
/// <summary>
/// Find ellipses in image: threshold/invert, detect ellipses, locate the four
/// marker ellipses, approximate the calibration plane, then match detected ellipse
/// centers against the expected projections of the object points.
/// </summary>
/// <param name="img">Image to search pattern for</param>
/// <param name="image_points">Detected centers</param>
/// <returns>True if pattern was found, false otherwise</returns>
public override bool FindPattern(Emgu.CV.Image<Gray, byte> img, out System.Drawing.PointF[] image_points)
{
    Emgu.CV.Image<Gray, byte> gray = img.Convert<Gray, byte>();
    gray._ThresholdBinary(new Gray(_binary_threshold), new Gray(255.0));
    gray._Not(); // Circles are black, black is considered backgroud, therefore flip.

    Parsley.Core.EllipseDetector ed = new Parsley.Core.EllipseDetector();
    ed.MinimumContourCount = this.MinimumContourCount;

    // Detect initial ellipses
    List<Parsley.Core.DetectedEllipse> ellipses =
        new List<Parsley.Core.DetectedEllipse>(ed.DetectEllipses(gray));

    // Filter out all ellipses below rating threshold
    List<Parsley.Core.DetectedEllipse> finals =
        new List<Parsley.Core.DetectedEllipse>(
            ellipses.Where(e => { return e.Rating < this.MeanDistanceThreshold; })
        );

    // At least the number of required ellipses need to be found
    if (finals.Count < _number_circle_centers.Width * _number_circle_centers.Height)
    {
        image_points = new System.Drawing.PointF[0];
        return false;
    }

    int[] marker_ids;
    if (!FindMarkerEllipses(gray, finals, out marker_ids))
    {
        image_points = new System.Drawing.PointF[0];
        return false;
    }

    // Check that all markers are found
    if (marker_ids.Length != 4)
    {
        image_points = new System.Drawing.PointF[0];
        return false;
    }

    // Find intrinsic/extrinsic calibration matrices based on known marker correspondences
    Emgu.CV.IntrinsicCameraParameters icp;
    Emgu.CV.ExtrinsicCameraParameters ecp;
    ApproximatePlane(finals, marker_ids, out icp, out ecp, img.Size);

    // Project all object points to image points
    MCvPoint3D32f[] converted_object_points = Array.ConvertAll(
        this.ObjectPoints.ToArray(),
        value => { return value.ToEmguF(); });

    System.Drawing.PointF[] expected_image_points =
        Emgu.CV.CameraCalibration.ProjectPoints(converted_object_points, ecp, icp);

    // Keep only detected ellipse centers close enough to their expected projections.
    image_points =
        expected_image_points.Select(
            e => { return NearestEllipseCenter(finals, e); }
        ).Where(
            ne => { return Math.Sqrt(ne.dist2) < _ellipse_distance; }
        ).Select(
            ne => { return ne.center; }
        ).ToArray();

    // currently we need to detect all requested ellipses.
    return image_points.Length == _number_circle_centers.Width * _number_circle_centers.Height;
}
/// <summary>
/// Reference-assembly placeholder (`throw null`); the real implementation lives
/// in the runtime assembly and this stub is never meant to execute.
/// </summary>
public static System.Drawing.Point Truncate(System.Drawing.PointF value) { throw null; }
/// <summary>
/// Creates a virtual capture camera — an orbit camera rendering the chessboard into
/// a 10fps virtual capture — and registers it with the world scene and capture list.
/// </summary>
private void AddVirtualCaptureCamera(string name, string uuid, Color color)
{
    var size = new Size(1600, 1200);
    // Principal-point constants appear to come from a prior calibration at half
    // resolution, scaled by 2 — TODO confirm.
    var principal = new PointF(392.713f * 2, 274.4965f * 2);
    var orbitCamera = new OrbitCamera("{0} Orbit".FormatWith(name), uuid, size, principal, size.Width, nearPlaneDistance: 2, farPlaneDistance: 40)
    {
        Color = Color.DarkCyan,
        Center = new Vector3(5, 4, 0),
        Distance = 30,
        YawPitch = new Vector2(0.1f, 0.1f),
    };
    var virtualCapture = new VirtualCapture(name, uuid, size, fps: 10)
    {
        Scene = new Scene(orbitCamera) { new Clear(Color.White), Chessboard }
    };
    var virtualCaptureCamera = new CaptureCamera(virtualCapture, 0) { Color = color };
    WorldScene.Add(orbitCamera);
    CaptureCameras.Add(virtualCaptureCamera);
}
/// <summary>
/// Builds a smoothed path through the current touch points using Catmull-Rom
/// interpolation, emitting <paramref name="granularity"/> sub-points per segment.
/// NOTE(review): pointsArray aliases currentPoints, so the duplicated first/last
/// control points are inserted into that same list — confirm this is intended.
/// </summary>
/// <param name="granularity">Number of interpolated points per segment.</param>
/// <param name="smoothedPoints">Receives the smoothed point list.</param>
/// <returns>The smoothed path, or the original path when fewer than 4 points exist.</returns>
Path smoothedPathWithGranularity (int granularity, out List<System.Drawing.PointF> smoothedPoints)
{
    List<System.Drawing.PointF> pointsArray = currentPoints;
    smoothedPoints = new List<System.Drawing.PointF> ();

    //Not enough points to smooth effectively, so return the original path and points.
    if (pointsArray.Count < 4) {
        smoothedPoints = pointsArray;
        return currentPath;
    }

    //Create a new bezier path to hold the smoothed path.
    Path smoothedPath = new Path ();

    //Duplicate the first and last points as control points.
    pointsArray.Insert (0, pointsArray [0]);
    pointsArray.Add (pointsArray [pointsArray.Count - 1]);

    //Add the first point
    smoothedPath.MoveTo (pointsArray [0].X, pointsArray [0].Y);
    smoothedPoints.Add (pointsArray [0]);

    for (var index = 1; index < pointsArray.Count - 2; index++) {
        // p1..p2 is the segment being interpolated; p0 and p3 shape the tangents.
        System.Drawing.PointF p0 = pointsArray [index - 1];
        System.Drawing.PointF p1 = pointsArray [index];
        System.Drawing.PointF p2 = pointsArray [index + 1];
        System.Drawing.PointF p3 = pointsArray [index + 2];

        //Add n points starting at p1 + dx/dy up until p2 using Catmull-Rom splines
        for (var i = 1; i < granularity; i++) {
            float t = (float)i * (1f / (float)granularity);
            float tt = t * t;
            float ttt = tt * t;

            //Intermediate point on the Catmull-Rom spline between p1 and p2.
            System.Drawing.PointF mid = new System.Drawing.PointF ();
            mid.X = 0.5f * (2f * p1.X + (p2.X - p0.X) * t + (2f * p0.X - 5f * p1.X + 4f * p2.X - p3.X) * tt + (3f * p1.X - p0.X - 3f * p2.X + p3.X) * ttt);
            mid.Y = 0.5f * (2 * p1.Y + (p2.Y - p0.Y) * t + (2 * p0.Y - 5 * p1.Y + 4 * p2.Y - p3.Y) * tt + (3 * p1.Y - p0.Y - 3 * p2.Y + p3.Y) * ttt);
            smoothedPath.LineTo (mid.X, mid.Y);
            smoothedPoints.Add (mid);
        }

        //Add p2
        smoothedPath.LineTo (p2.X, p2.Y);
        smoothedPoints.Add (p2);
    }

    //Add the last point
    System.Drawing.PointF last = pointsArray [pointsArray.Count - 1];
    smoothedPath.LineTo (last.X, last.Y);
    smoothedPoints.Add (last);

    return smoothedPath;
}
//Iterate through the touch history since the last touch event and add them to the path and points list.
void handleTouch (MotionEvent e)
{
    float touchX = e.GetX ();
    float touchY = e.GetY ();
    System.Drawing.PointF touch = new System.Drawing.PointF (touchX, touchY);
    resetBounds (touchX, touchY);

    // Replay the intermediate positions Android coalesced into this event.
    for (var i = 0; i < e.HistorySize; i++) {
        float historicalX = e.GetHistoricalX(i);
        float historicalY = e.GetHistoricalY(i);
        System.Drawing.PointF historical = new System.Drawing.PointF (historicalX, historicalY);
        updateBounds (historicalX, historicalY);
        currentPath.LineTo (historicalX, historicalY);
        currentPoints.Add (historical);
    }

    // Finally append the event's current position.
    currentPath.LineTo (touchX, touchY);
    currentPoints.Add (touch);
}
/// <summary>
/// Handles touch input: starts a new stroke on Down, extends it on Move, and on
/// Up smooths the stroke, stores it, and redraws.
/// </summary>
public override bool OnTouchEvent (MotionEvent e)
{
    float touchX = e.GetX ();
    float touchY = e.GetY ();
    System.Drawing.PointF touch = new System.Drawing.PointF (touchX, touchY);

    switch (e.Action) {
    case MotionEventActions.Down:
        lastX = touchX;
        lastY = touchY;

        //Create a new path and move to the touched point.
        currentPath = new Path();
        currentPath.MoveTo (touchX, touchY);

        //Clear the list of points then add the touched point
        currentPoints.Clear ();
        currentPoints.Add (touch);

        //Display the clear button
        lblClear.Visibility = ViewStates.Visible;
        return true;
    case MotionEventActions.Move:
        handleTouch (e);
        // Invalidate the dirty rect padded by one pixel on every side.
        canvasView.Invalidate(
            (int) (dirtyRect.Left - 1),
            (int) (dirtyRect.Top - 1),
            (int) (dirtyRect.Right + 1),
            (int) (dirtyRect.Bottom + 1));
        break;
    case MotionEventActions.Up:
        handleTouch (e);
        // Replace the raw stroke with its smoothed version.
        currentPath = smoothedPathWithGranularity (20, out currentPoints);

        //Add the current path and points to their respective lists.
        paths.Add (currentPath);
        points.Add (currentPoints.ToArray ());

        DrawStrokes ();
        canvasView.Invalidate ();
        break;
    default:
        return false;
    }

    lastX = touchX;
    lastY = touchY;
    return true;
}
/// <summary>
/// Verifies that ToParsley converts a System.Drawing.PointF into a vector whose
/// first two elements preserve X and Y.
/// </summary>
public void TestConvertSystemToParsley()
{
    System.Drawing.PointF p = new System.Drawing.PointF(1.0f, 2.0f);
    MathNet.Numerics.LinearAlgebra.Vector v = p.ToParsley();
    Assert.AreEqual(1.0, v[0]);
    Assert.AreEqual(2.0, v[1]);
}