// Resolves the effective animated value of attribute `attributename` on `element`,
// folding every AnimateInfo entry over the base value `orivalue`. Behavior per DomType:
//  - SvgString / SvgLink: return the last non-empty animate value (for Image elements
//    the referenced image is also loaded via ImageFunc.GetImageForURL).
//  - SvgMatrix: animate matrices are multiplied in (additive) or replace (non-additive).
//  - SvgNumber: values are summed when additive, otherwise replaced.
//  - SvgColor: additive non-"url" colors are channel-averaged as "rgb(r,g,b)" strings.
//  - SvgPath: additive paths of equal point count are combined point-by-point.
//  - SvgPoints: additive point arrays of equal length are averaged element-wise.
// NOTE(review): this block is decompiled output (goto labels, numbered locals) and is
// kept byte-for-byte. Suspected defects to confirm against the original sources:
//  * `Array.Copy(tfArray3, tfArray1, tfArray5.Length)` and
//    `Array.Copy(tfArray6, tfArray1, tfArray8.Length)` copy into tfArray1 while the
//    freshly allocated tfArray5 / tfArray8 look like the intended destinations
//    (both are fully overwritten by the loops that follow, so the copies mostly
//    clobber tfArray1 — and can throw when tfArray1 is null or too short).
//  * `(orivalue.ToString() == string.Empty) || (orivalue == null)` dereferences
//    orivalue before the null check.
//  * The SvgPath and SvgNumber merge paths add coordinates/values without dividing
//    by 2, unlike the SvgColor/SvgPoints branches which average — verify intent.
public static object GetAnimateValue(SvgElement element, string attributename, DomType domtype, object orivalue) { PointF[] tfArray6; PointF[] tfArray7; PointF[] tfArray8; int num8; Matrix matrix1 = new Matrix(); string text1 = string.Empty; GraphicsPath path1 = null; string text2 = string.Empty; PointF[] tfArray1 = null; bool flag1 = true; if (element.AnimateNameValues.ContainsKey(attributename)) { AnimateInfo info1 = (AnimateInfo) element.AnimateNameValues[attributename]; object[] objArray1 = info1.AnimateValues; bool[] flagArray1 = info1.ValueAdds; int num1 = 0; if ((domtype == DomType.SvgString) || (domtype == DomType.SvgLink)) { for (int num2 = objArray1.Length - 1; num2 >= 0; num2--) { if ((objArray1[num2] is string) && (objArray1[num2].ToString() != string.Empty)) { if (element is ItopVector.Core.Figure.Image) { ((ItopVector.Core.Figure.Image) element).RefImage = ImageFunc.GetImageForURL(objArray1[num2].ToString(), element); } return objArray1[num2].ToString(); } } return orivalue; } object[] objArray2 = objArray1; for (int num10 = 0; num10 < objArray2.Length; num10++) { PointF[] tfArray2; float single3; GraphicsPath path2; PointF[] tfArray3; PointF[] tfArray4; PointF[] tfArray5; object obj1 = objArray2[num10]; bool flag2 = flagArray1[num1]; switch (domtype) { case DomType.SvgMatrix: { Matrix matrix2 = new Matrix(); if ((obj1 != null) && (obj1.ToString() != string.Empty)) { matrix2 = ((Matrix) obj1).Clone(); } if (flag2) { matrix1.Multiply(matrix2); goto Label_046F; } matrix1 = matrix2; goto Label_046F; } case DomType.SvgNumber: { single3 = 0f; if ((obj1 != null) && (obj1.ToString() != string.Empty)) { single3 = (float) obj1; if (!flag2 || (text1 == string.Empty)) { goto Label_0246; } float single9 = float.Parse(text1) + single3; text1 = single9.ToString(); } goto Label_046F; } case DomType.SvgString: { goto Label_046F; } case DomType.SvgColor: { string text3 = string.Empty; if ((obj1 != null) && (obj1.ToString() != string.Empty)) { text3 = (string) obj1; } 
if (text3 != string.Empty) { if ((flag2 && (text2 != string.Empty)) && (!text2.Trim().StartsWith("url") && !text3.Trim().StartsWith("url"))) { Color color1 = ColorFunc.ParseColor(text3); Color color2 = ColorFunc.ParseColor(text2); int num4 = (color1.R + color2.R) / 2; int num5 = (color1.G + color2.G) / 2; int num6 = (color1.B + color2.B) / 2; string[] textArray1 = new string[7] { "rgb(", num4.ToString(), ",", num5.ToString(), ",", num6.ToString(), ")" } ; text2 = string.Concat(textArray1); goto Label_046F; } text2 = text3; } goto Label_046F; } case DomType.SvgPath: { if ((obj1 != null) && (obj1.ToString() != string.Empty)) { path2 = (GraphicsPath) obj1; if (!flag2 || (path1 == null)) { goto Label_0460; } tfArray3 = path2.PathPoints; tfArray4 = path1.PathPoints; if (tfArray3.Length == tfArray4.Length) { goto Label_03B5; } } goto Label_046F; } case DomType.SvgPoints: { tfArray2 = new PointF[0]; if (obj1 is PointF[]) { tfArray2 = (PointF[]) obj1; } if (!flag2) { break; } if (tfArray1.Length == tfArray2.Length) { for (int num3 = 0; num3 < tfArray2.Length; num3++) { PointF tf1 = tfArray1[num3]; PointF tf2 = tfArray2[num3]; float single1 = (tf1.X + tf2.X) / 2f; float single2 = (tf1.Y + tf2.Y) / 2f; tfArray1[num3] = new PointF(single1, single2); } } goto Label_046F; } default: { goto Label_046F; } } tfArray1 = (PointF[]) tfArray2.Clone(); goto Label_046F; Label_0246: text1 = single3.ToString(); goto Label_046F; Label_03B5: tfArray5 = new PointF[tfArray4.Length]; Array.Copy(tfArray3, tfArray1, tfArray5.Length); byte[] buffer1 = path2.PathTypes; byte[] buffer2 = path1.PathTypes; for (int num7 = 0; num7 < Math.Min(tfArray3.Length, tfArray4.Length); num7++) { PointF tf3 = tfArray3[num7]; PointF tf4 = tfArray4[num7]; float single4 = tf3.X + tf4.X; float single5 = tf3.Y + tf4.Y; tfArray5[num7] = new PointF(single4, single5); } path1 = new GraphicsPath(tfArray5, path2.PathTypes); goto Label_046D; Label_0460: path1 = (GraphicsPath) path2.Clone(); Label_046D:; Label_046F:; } if 
(flagArray1.Length > 0) { flag1 = flagArray1[flagArray1.Length - 1]; } } switch (domtype) { case DomType.SvgMatrix: { Matrix matrix3 = new Matrix(); if (orivalue != null) { matrix3 = ((Matrix) orivalue).Clone(); } if (flag1) { matrix3.Multiply(matrix1); } else { matrix3 = matrix1.Clone(); } return matrix3.Clone(); } case DomType.SvgNumber: { if ((flag1 && (orivalue != null)) && (orivalue.ToString() != string.Empty)) { float single6 = (float) orivalue; if (text1 == string.Empty) { text1 = single6.ToString(); break; } float single10 = float.Parse(text1) + single6; text1 = single10.ToString(); } break; } case DomType.SvgString: { return orivalue; } case DomType.SvgColor: { if (text2 == string.Empty) { return orivalue; } if ((flag1 && (orivalue != null)) && (!text2.Trim().StartsWith("url") && !((string) orivalue).Trim().StartsWith("url"))) { Color color3 = ColorFunc.ParseColor((string) orivalue); Color color4 = ColorFunc.ParseColor(text2); string[] textArray2 = new string[7]; textArray2[0] = "rgb("; int num11 = (color3.R + color4.R) / 2; textArray2[1] = num11.ToString(); textArray2[2] = ","; int num12 = (color3.G + color4.G) / 2; textArray2[3] = num12.ToString(); textArray2[4] = ","; int num13 = (color3.B + color4.B) / 2; textArray2[5] = num13.ToString(); textArray2[6] = ")"; text2 = string.Concat(textArray2); } return text2; } case DomType.SvgPath: { if (path1 == null) { return orivalue; } if (!flag1 || (orivalue == null)) { return path1; } tfArray6 = ((GraphicsPath) orivalue).PathPoints; tfArray7 = path1.PathPoints; tfArray8 = new PointF[tfArray6.Length]; Array.Copy(tfArray6, tfArray1, tfArray8.Length); num8 = 0; goto Label_0738; } case DomType.SvgPoints: { if (tfArray1.Length > 0) { PointF[] tfArray9 = new PointF[0]; if (!(orivalue is PointF[]) || !flag1) { return tfArray1; } tfArray9 = (PointF[]) orivalue; if (tfArray9.Length != tfArray1.Length) { return tfArray1; } for (int num9 = 0; num9 < tfArray1.Length; num9++) { tfArray1[num9] = new PointF((tfArray1[num9].X + 
tfArray9[num9].X) / 2f, (tfArray1[num9].Y + tfArray9[num9].Y) / 2f); } } return tfArray1; } default: { return string.Empty; } } if (text1 != string.Empty) { return float.Parse(text1); } if ((orivalue.ToString() == string.Empty) || (orivalue == null)) { return (float) AttributeFunc.GetDefaultValue(element, attributename); } return (float) orivalue; Label_0738: if (num8 >= Math.Min(tfArray6.Length, tfArray7.Length)) { return new GraphicsPath(tfArray8, path1.PathTypes); } PointF tf5 = tfArray6[num8]; PointF tf6 = tfArray7[num8]; float single7 = tf5.X + tf6.X; float single8 = tf5.Y + tf6.Y; tfArray8[num8] = new PointF(single7, single8); num8++; goto Label_0738; }
/// <summary>
/// Fills a polygon on the underlying surface after mapping every vertex
/// through Convert(). The caller's array is never mutated.
/// </summary>
/// <param name="brush">Brush used to fill the polygon.</param>
/// <param name="inputPoints">Polygon vertices in the source coordinate space.</param>
public void FillPolygon(Brush brush, PointF[] inputPoints)
{
    // Work on a copy because Convert() rewrites the points in place.
    PointF[] converted = (PointF[])inputPoints.Clone();

    // Snapshot the field once; bail out quietly when no surface is attached.
    Graphics target = _g;
    if (target == null)
    {
        return;
    }

    // NOTE(review): lock(this) kept as-is — the sibling Draw/Fill methods use
    // the same lock object, so changing it would break their mutual exclusion.
    lock (this)
    {
        int i = 0;
        while (i < converted.Length)
        {
            Convert(ref converted[i]);
            i++;
        }
        target.FillPolygon(brush, converted);
    }
}
/// <summary>
/// Scales and rotates a point array around (0,0), then translates the result
/// to the given origin. The input array is left unmodified; a transformed
/// copy is returned.
/// </summary>
/// <param name="p0">Points to transform.</param>
/// <param name="origin">Offset applied after the scale and rotation.</param>
/// <param name="rotation">Rotation angle, in degrees.</param>
/// <param name="xScale">Horizontal scale factor.</param>
/// <param name="yScale">Vertical scale factor.</param>
/// <returns>A transformed copy of <paramref name="p0"/>.</returns>
public static PointF[] ScaleRotateTranslatePoints(PointF[] p0, PointF origin, float rotation, float xScale, float yScale)
{
    // Clone so the caller's array is not mutated by TransformPoints.
    PointF[] p1 = (PointF[])p0.Clone();

    // Build the affine transform in the exact same order as before: scale and
    // rotation are explicitly prepended, translation uses the default order.
    // Wrap in `using` so the GDI+ matrix's native handle is not leaked.
    using (Matrix m = new Matrix())
    {
        m.Scale(xScale, yScale, MatrixOrder.Prepend);
        m.Rotate(rotation, MatrixOrder.Prepend);
        m.Translate(origin.X, origin.Y);
        m.TransformPoints(p1);
    }

    return p1;
}
/// <summary>
/// Adds a sequence of connected cubic Bézier curves to the current figure.
/// </summary>
/// <param name="points">Control points; the count must be of the form 4 + 3n.</param>
/// <exception cref="ArgumentException">
/// Thrown when fewer than four points are supplied, or when the count is not 4 + 3n.
/// </exception>
// NOTE(review): `points.Clone() as XPoint[]` always evaluates to null — a
// PointF[] can never be cast to XPoint[] — so the recorded path item carries
// no points while gdipPath receives the real data. The points should be
// converted to XPoint[] element-wise instead; TODO confirm against the
// project's XPoint conversion helper and fix.
public void AddBeziers(PointF[] points) { if (points.Length < 4) throw new ArgumentException("At least four points required for bezier curve.", "points"); if ((points.Length - 1) % 3 != 0) throw new ArgumentException("Invalid number of points for bezier curve. Number must fulfil 4+3n.", "points"); this.items.Add(new XGraphicsPathItem(XGraphicsPathItemType.Beziers, points.Clone() as XPoint[])); this.dirty = true; this.gdipPath.AddBeziers(points); }
/// <summary>
/// Draws a polygon outline on the underlying surface after mapping every
/// vertex through Convert(). The caller's array is never mutated.
/// </summary>
/// <param name="pen">Pen used to stroke the polygon.</param>
/// <param name="inputPoints">Polygon vertices in the source coordinate space.</param>
public void DrawPolygon(Pen pen, PointF[] inputPoints)
{
    // Copy first: Convert() rewrites the points in place.
    PointF[] converted = (PointF[])inputPoints.Clone();

    // Read the field once; silently skip drawing when no surface is attached.
    Graphics target = _g;
    if (target == null)
    {
        return;
    }

    // lock(this) retained — the matching FillPolygon uses the same lock object.
    lock (this)
    {
        int i = 0;
        while (i < converted.Length)
        {
            Convert(ref converted[i]);
            i++;
        }
        target.DrawPolygon(pen, converted);
    }
}
/// <summary>
/// Draws a polygon outline after mapping each vertex through Convert();
/// operates on a copy so the caller's array is left untouched.
/// </summary>
/// <param name="pen">Pen used to stroke the polygon.</param>
/// <param name="inputPoints">Polygon vertices in the source coordinate space.</param>
public void DrawPolygon(Pen pen, PointF[] inputPoints)
{
    PointF[] mapped = (PointF[])inputPoints.Clone();

    // Serialize against the sibling drawing methods, which lock on the same instance.
    lock (this)
    {
        for (int n = 0; n < mapped.Length; n++)
        {
            Convert(ref mapped[n]);
        }
        _g.DrawPolygon(pen, mapped);
    }
}
/// <summary>
/// Writes the given points back into the polyline's "points" attribute
/// (formatted as "x y,x y,...") and records an undo step on the document.
/// The attribute is only rewritten for an element with a single info entry at
/// control time zero, or for an element that is detached from the tree.
/// </summary>
/// <param name="polyline">Element whose "points" attribute is updated; ignored when null.</param>
/// <param name="newpoints">New vertex positions.</param>
private void UpdateGraph(SvgElement polyline, PointF[] newpoints)
{
    if (polyline == null)
    {
        return;
    }

    // Format a defensive copy of the points as comma-separated "X Y" pairs.
    PointF[] pts = (PointF[])newpoints.Clone();
    StringBuilder attr = new StringBuilder();
    for (int i = 0; i < pts.Length; i++)
    {
        if (i > 0)
        {
            attr.Append(",");
        }
        attr.Append(pts[i].X.ToString() + " " + pts[i].Y.ToString());
    }

    SvgDocument doc = this.mouseAreaControl.SVGDocument;

    // Temporarily force change tracking on so the undo step is recorded.
    bool savedAcceptChanges = doc.AcceptChanges;
    doc.AcceptChanges = true;
    doc.NumberOfUndoOperations = 200;

    bool singleInfoAtTimeZero = (polyline.InfoList.Count == 1)
        && (this.mouseAreaControl.SVGDocument.ControlTime == 0);
    if (singleInfoAtTimeZero || (polyline.ParentNode == null))
    {
        AttributeFunc.SetAttributeValue(polyline, "points", attr.ToString());
    }

    doc.NotifyUndo();
    doc.AcceptChanges = savedAcceptChanges;
}
/// <summary>
/// Maps world-space points into view space using the current view matrix.
/// </summary>
/// <param name="worldPts">Points in world coordinates; not modified.</param>
/// <param name="viewPts">Receives the corresponding view-space points.</param>
public void WorldToView(PointF[] worldPts, out PointF[] viewPts)
{
    // Transform a copy so the caller's array survives unchanged.
    PointF[] mapped = (PointF[])worldPts.Clone();
    mViewMatrix.TransformPoints(mapped);
    viewPts = mapped;
}
/// <summary>
/// Converts the given point array from view coordinates to absolute world
/// coordinates using the cached inverted view matrix.
/// </summary>
/// <param name="viewPts">Points in view coordinates; not modified.</param>
/// <param name="worldPts">Receives the corresponding world-space points.</param>
public void ViewToWorld(PointF[] viewPts, out PointF[] worldPts)
{
    // Transform a copy so the caller's array survives unchanged.
    PointF[] mapped = (PointF[])viewPts.Clone();
    mInvertedViewMatrix.TransformPoints(mapped);
    worldPts = mapped;
}
/// <summary>
/// Creates a point sequence suitable for drawing the curve digitally: the
/// signal is resampled again and again, interpolating points, until the
/// requested resolution is reached.
/// </summary>
/// <param name="curva">Curve to resample (a curve initially has four points).</param>
/// <param name="numMuestras">Desired number of samples.</param>
/// <returns>
/// The resampled curve, or <paramref name="curva"/> itself when it already
/// has the requested number of samples.
/// </returns>
private PointF[] digitalizar(PointF[] curva, int numMuestras)
{
    if (numMuestras == curva.Length)
    {
        return curva;
    }

    PointF[] resampled = (PointF[])curva.Clone();

    // Every curve starts with four points; each pass doubles the sample count.
    int sampleCount = 4;

    // Use '<' rather than '!=' so a target that is not an exact power-of-two
    // multiple of 4 terminates instead of looping forever (the original
    // `while (gradoMejora != numMuestras)` never exits for such targets).
    while (sampleCount < numMuestras)
    {
        resampled = duplicarMuestras(resampled);
        sampleCount *= 2;
    }

    return resampled;
}
/// <summary>
/// Changes the locations associated with a special. Both the special and the
/// incoming point array are cloned so the event database never shares mutable
/// state with the caller.
/// </summary>
/// <param name="eventDB">Database holding the special.</param>
/// <param name="specialId">Identifier of the special to update.</param>
/// <param name="newLocations">New location points; cloned before storage.</param>
public static void ChangeSpecialLocations(EventDB eventDB, Id<Special> specialId, PointF[] newLocations)
{
    Special updated = (Special)eventDB.GetSpecial(specialId).Clone();
    updated.locations = (PointF[])newLocations.Clone();
    eventDB.ReplaceSpecial(specialId, updated);
}
/// <summary>
/// Creates a special of the given kind that applies to all courses. The
/// location array is cloned so later caller mutations cannot affect this instance.
/// </summary>
/// <param name="kind">Kind of special being created.</param>
/// <param name="locations">Location points; cloned before storage.</param>
public Special(SpecialKind kind, PointF[] locations)
{
    this.allCourses = true;
    this.kind = kind;
    this.locations = (PointF[])locations.Clone();
}
/// <summary>
/// Transforms the specified points from document coordinates to coordinates
/// relative to the left-top point of the box. Returns null for null input.
/// </summary>
/// <param name="points">Points in document coordinates; not modified.</param>
/// <returns>A translated copy of the array, or null when the input is null.</returns>
private PointF[] docToLocal(PointF[] points)
{
    if (points == null)
    {
        return null;
    }

    // Normalize first so negative-sized rectangles still give the true top-left.
    RectangleF bounds = Utilities.normalizeRect(this.rect);

    PointF[] local = (PointF[])points.Clone();
    int i = 0;
    while (i < local.Length)
    {
        local[i].X -= bounds.X;
        local[i].Y -= bounds.Y;
        i++;
    }
    return local;
}
/// <summary>
/// Fills a polygon after mapping each vertex through Convert(); operates on a
/// copy so the caller's array is left untouched.
/// </summary>
/// <param name="brush">Brush used to fill the polygon.</param>
/// <param name="inputPoints">Polygon vertices in the source coordinate space.</param>
public void FillPolygon(Brush brush, PointF[] inputPoints)
{
    PointF[] mapped = (PointF[])inputPoints.Clone();

    // Serialize against the sibling drawing methods, which lock on the same instance.
    lock (this)
    {
        for (int n = 0; n < mapped.Length; n++)
        {
            Convert(ref mapped[n]);
        }
        _g.FillPolygon(brush, mapped);
    }
}
/// <summary>
/// Converts a sweep angle into the target coordinate system: a reference
/// vector is rotated by the start angle, a copy of it is rotated further by
/// the sweep angle, both are mapped through the spatial transform, and the
/// angle between the two mapped vectors is returned, rounded to a whole degree.
/// </summary>
/// <param name="sweepAngle">Sweep angle, in degrees.</param>
/// <param name="startAngle">Start angle, in degrees.</param>
/// <param name="transform">Transform between source and destination systems.</param>
/// <param name="targetSystem">Coordinate system to express the result in.</param>
internal static float ConvertSweepAngle(float sweepAngle, float startAngle, SpatialTransform transform, CoordinateSystem targetSystem)
{
    PointF[] startVector = new PointF[] { new PointF(100, 0) };

    Matrix rotation = new Matrix();
    rotation.Rotate(startAngle);
    rotation.TransformVectors(startVector);

    // The sweep vector continues from the already-rotated start vector,
    // so its total rotation is startAngle + sweepAngle.
    PointF[] sweepVector = (PointF[])startVector.Clone();
    rotation.Reset();
    rotation.Rotate(sweepAngle);
    rotation.TransformVectors(sweepVector);
    rotation.Dispose();

    SizeF startMapped;
    SizeF sweepMapped;
    if (targetSystem == Graphics.CoordinateSystem.Destination)
    {
        startMapped = transform.ConvertToDestination(new SizeF(startVector[0]));
        sweepMapped = transform.ConvertToDestination(new SizeF(sweepVector[0]));
    }
    else
    {
        startMapped = transform.ConvertToSource(new SizeF(startVector[0]));
        sweepMapped = transform.ConvertToSource(new SizeF(sweepVector[0]));
    }

    // Angle between the transformed vectors, rounded to an integer number of degrees.
    return (int)Math.Round(Vector.SubtendedAngle(sweepMapped.ToPointF(), PointF.Empty, startMapped.ToPointF()));
}
// Exercises a Feature2D detector/descriptor pair end-to-end with EmguCV:
// extracts keypoints and descriptors from "box.png" (the model) and
// "box_in_scene.png" (the observation) — using a single combined
// DetectAndCompute pass when detector and generator are the same object —
// then k-NN matches them (k = 2) with a brute-force matcher (Hamming distance
// for 8-bit descriptors, L2 otherwise), filters matches by uniqueness and
// size-and-orientation voting, estimates a homography when at least four
// matches survive, and draws the projected model outline onto a vertically
// stacked visualization. Returns true iff a homography was found.
// NOTE(review): the trailing /* ... */ region is legacy SURFTracker code kept
// for reference; the brace structure around it balances the commented-out
// `for (int k = 0; k < 1; k++)` header at the top. Block kept byte-for-byte.
public static bool TestFeature2DTracker(Feature2D keyPointDetector, Feature2D descriptorGenerator) { //for (int k = 0; k < 1; k++) { Feature2D feature2D = null; if (keyPointDetector == descriptorGenerator) { feature2D = keyPointDetector as Feature2D; } Mat modelImage = EmguAssert.LoadMat("box.png"); //Image<Gray, Byte> modelImage = new Image<Gray, byte>("stop.jpg"); //modelImage = modelImage.Resize(400, 400, true); //modelImage._EqualizeHist(); #region extract features from the object image Stopwatch stopwatch = Stopwatch.StartNew(); VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint(); Mat modelDescriptors = new Mat(); if (feature2D != null) { feature2D.DetectAndCompute(modelImage, null, modelKeypoints, modelDescriptors, false); } else { keyPointDetector.DetectRaw(modelImage, modelKeypoints); descriptorGenerator.Compute(modelImage, modelKeypoints, modelDescriptors); } stopwatch.Stop(); EmguAssert.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds)); #endregion //Image<Gray, Byte> observedImage = new Image<Gray, byte>("traffic.jpg"); Image<Gray, Byte> observedImage = EmguAssert.LoadImage<Gray, byte>("box_in_scene.png"); //Image<Gray, Byte> observedImage = modelImage.Rotate(45, new Gray(0.0)); //image = image.Resize(400, 400, true); //observedImage._EqualizeHist(); #region extract features from the observed image stopwatch.Reset(); stopwatch.Start(); VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint(); using (Mat observedDescriptors = new Mat()) { if (feature2D != null) { feature2D.DetectAndCompute(observedImage, null, observedKeypoints, observedDescriptors, false); } else { keyPointDetector.DetectRaw(observedImage, observedKeypoints); descriptorGenerator.Compute(observedImage, observedKeypoints, observedDescriptors); } stopwatch.Stop(); EmguAssert.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds)); #endregion //Merge the object image and the 
observed image into one big image for display Image<Gray, Byte> res = modelImage.ToImage<Gray, Byte>().ConcateVertical(observedImage); Rectangle rect = new Rectangle(Point.Empty, modelImage.Size); PointF[] pts = new PointF[] { new PointF(rect.Left, rect.Bottom), new PointF(rect.Right, rect.Bottom), new PointF(rect.Right, rect.Top), new PointF(rect.Left, rect.Top)}; Mat homography = null; stopwatch.Reset(); stopwatch.Start(); int k = 2; DistanceType dt = modelDescriptors.Depth == CvEnum.DepthType.Cv8U ? DistanceType.Hamming : DistanceType.L2; //using (Matrix<int> indices = new Matrix<int>(observedDescriptors.Rows, k)) //using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k)) using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch()) using (BFMatcher matcher = new BFMatcher(dt)) { //ParamDef[] parameterDefs = matcher.GetParams(); matcher.Add(modelDescriptors); matcher.KnnMatch(observedDescriptors, matches, k, null); Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1); mask.SetTo(new MCvScalar(255)); //mask.SetValue(255); Features2DToolbox.VoteForUniqueness(matches, 0.8, mask); int nonZeroCount = CvInvoke.CountNonZero(mask); if (nonZeroCount >= 4) { nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints, matches, mask, 1.5, 20); if (nonZeroCount >= 4) homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints, observedKeypoints, matches, mask, 2); } } stopwatch.Stop(); EmguAssert.WriteLine(String.Format("Time for feature matching: {0} milli-sec", stopwatch.ElapsedMilliseconds)); bool success = false; if (homography != null) { PointF[] points = pts.Clone() as PointF[]; points = CvInvoke.PerspectiveTransform(points, homography); //homography.ProjectPoints(points); for (int i = 0; i < points.Length; i++) points[i].Y += modelImage.Height; res.DrawPolyline( #if NETFX_CORE Extensions. #else Array. 
#endif ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5); success = true; } //Emgu.CV.UI.ImageViewer.Show(res); return success; } /* stopwatch.Reset(); stopwatch.Start(); //set the initial region to be the whole image using (Image<Gray, Single> priorMask = new Image<Gray, float>(observedImage.Size)) { priorMask.SetValue(1.0); homography = tracker.CamShiftTrack( observedFeatures, (RectangleF)observedImage.ROI, priorMask); } Trace.WriteLine(String.Format("Time for feature tracking: {0} milli-sec", stopwatch.ElapsedMilliseconds)); if (homography != null) //set the initial tracking window to be the whole image { PointF[] points = pts.Clone() as PointF[]; homography.ProjectPoints(points); for (int i = 0; i < points.Length; i++) points[i].Y += modelImage.Height; res.DrawPolyline(Array.ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5); return true; } else { return false; }*/ } }
/// <summary>
/// Transforms points' coordinates from document coordinates to coordinates
/// relative to the given rectangle's top-left corner.
/// </summary>
/// <param name="points">Point array; may be null.</param>
/// <param name="rect">Rectangle whose location defines the new origin.</param>
/// <returns>
/// A translated copy of the array, or null when <paramref name="points"/> is null.
/// </returns>
private PointF[] docToLocal(PointF[] points, RectangleF rect)
{
    // Mirror the single-argument overload: a null input yields null instead
    // of throwing a NullReferenceException.
    if (points == null)
        return null;

    PointF[] result = (PointF[])points.Clone();
    for (int i = 0; i < result.Length; i++)
    {
        result[i].X -= rect.X;
        result[i].Y -= rect.Y;
    }
    return result;
}
// Legacy EmguCV SURF smoke test: extracts SURF features from "box.png" (the
// model) and "box_in_scene.png" (the observation) with a hessian threshold of
// 500, matches them through SURFTracker.Detect (uniqueness threshold 0.8),
// projects the model rectangle through the resulting homography onto a
// vertically stacked visualization, and finally re-tracks via CamShiftTrack
// seeded with a full-image prior mask, drawing the tracked outline as well.
// Timings for each stage are written with Trace.WriteLine. No assertions are
// made — the test only verifies the pipeline runs. Block kept byte-for-byte.
public void TestSURF() { for (int k = 0; k < 1; k++) { Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png"); //Image<Gray, Byte> modelImage = new Image<Gray, byte>("stop.jpg"); //modelImage = modelImage.Resize(400, 400, true); //modelImage._EqualizeHist(); #region extract features from the object image Stopwatch stopwatch = Stopwatch.StartNew(); MCvSURFParams param1 = new MCvSURFParams(500, false); SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref param1); SURFTracker tracker = new SURFTracker(modelFeatures); stopwatch.Stop(); Trace.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds)); #endregion //Image<Gray, Byte> observedImage = new Image<Gray, byte>("traffic.jpg"); Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png"); //Image<Gray, Byte> observedImage = modelImage.Rotate(45, new Gray(0.0)); //Image<Gray, Byte> observedImage = new Image<Gray, byte>("left.jpg"); //image = image.Resize(400, 400, true); //observedImage._EqualizeHist(); #region extract features from the observed image stopwatch.Reset(); stopwatch.Start(); MCvSURFParams param2 = new MCvSURFParams(500, false); SURFFeature[] observedFeatures = observedImage.ExtractSURF(ref param2); stopwatch.Stop(); Trace.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds)); #endregion //Merge the object image and the observed image into one big image for display Image<Gray, Byte> res = modelImage.ConcateVertical(observedImage); Rectangle rect = modelImage.ROI; PointF[] pts = new PointF[] { new PointF(rect.Left, rect.Bottom), new PointF(rect.Right, rect.Bottom), new PointF(rect.Right, rect.Top), new PointF(rect.Left, rect.Top)}; HomographyMatrix homography; stopwatch.Reset(); stopwatch.Start(); homography = tracker.Detect(observedFeatures, 0.8); stopwatch.Stop(); Trace.WriteLine(String.Format("Time for feature matching: {0} milli-sec", stopwatch.ElapsedMilliseconds)); 
if (homography != null) { PointF[] points = pts.Clone() as PointF[]; homography.ProjectPoints(points); for (int i = 0; i < points.Length; i++) points[i].Y += modelImage.Height; res.DrawPolyline(Array.ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5); } stopwatch.Reset(); stopwatch.Start(); //set the initial region to be the whole image using (Image<Gray, Single> priorMask = new Image<Gray, float>(observedImage.Size)) { priorMask.SetValue(1.0); homography = tracker.CamShiftTrack( observedFeatures, (RectangleF)observedImage.ROI, priorMask); } Trace.WriteLine(String.Format("Time for feature tracking: {0} milli-sec", stopwatch.ElapsedMilliseconds)); if (homography != null) //set the initial tracking window to be the whole image { PointF[] points = pts.Clone() as PointF[]; homography.ProjectPoints(points); for (int i = 0; i < points.Length; i++) points[i].Y += modelImage.Height; res.DrawPolyline(Array.ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5); } } }