/// <summary>
/// Draws the current image file onto the control, applying the affine transform.
/// </summary>
public void DrawImage()
{
    graphics.Clear(BackColor);
    srcBmp = new Bitmap(Util.GetImage(ImageFilePath));

    // Source rectangle offset by half a pixel so the transform maps pixel centres.
    RectangleF srcRect = new RectangleF(-0.5f, -0.5f, srcBmp.Width, srcBmp.Height);

    // Destination parallelogram corners in the order GDI+ expects:
    // upper-left, upper-right, lower-left.
    PointF[] corners =
    {
        new PointF(srcRect.Left, srcRect.Top),
        new PointF(srcRect.Right, srcRect.Top),
        new PointF(srcRect.Left, srcRect.Bottom)
    };

    // Map the corners through the affine matrix (transforms the copy in place,
    // leaving the source corners untouched).
    PointF[] destCorners = (PointF[])corners.Clone();
    MatAffine.TransformPoints(destCorners);

    graphics.DrawImage(srcBmp, destCorners, srcRect, GraphicsUnit.Pixel);

    // Force the control to repaint with the freshly drawn image.
    Refresh();
}
/// <summary>
/// Converts a sweep angle (relative to <paramref name="startAngle"/>) into the
/// equivalent sweep angle in the target coordinate system of the given transform.
/// </summary>
/// <param name="sweepAngle">Sweep angle, in degrees, measured from the start angle.</param>
/// <param name="startAngle">Start angle, in degrees.</param>
/// <param name="transform">Transform between source and destination systems.</param>
/// <param name="targetSystem">The coordinate system to convert the sweep angle into.</param>
/// <returns>The sweep angle in the target system, rounded to the nearest whole degree.</returns>
internal static float ConvertSweepAngle(float sweepAngle, float startAngle, SpatialTransform transform, CoordinateSystem targetSystem)
{
    PointF x = new PointF(100, 0);
    PointF[] startVector = new PointF[] { x };
    PointF[] sweepVector;

    // Build direction vectors for the start angle and for start+sweep.
    // 'using' guarantees the GDI+ matrix is released even if a transform call
    // throws (the original explicit Dispose() was skipped on the exception path).
    using (Matrix rotation = new Matrix())
    {
        rotation.Rotate(startAngle);
        rotation.TransformVectors(startVector);

        // Clone the already-rotated start vector, then rotate it by the sweep,
        // yielding a total rotation of startAngle + sweepAngle.
        sweepVector = (PointF[])startVector.Clone();
        rotation.Reset();
        rotation.Rotate(sweepAngle);
        rotation.TransformVectors(sweepVector);
    }

    SizeF startVectorTransformed, sweepVectorTransformed;
    if (targetSystem == Graphics.CoordinateSystem.Destination)
    {
        startVectorTransformed = transform.ConvertToDestination(new SizeF(startVector[0]));
        sweepVectorTransformed = transform.ConvertToDestination(new SizeF(sweepVector[0]));
    }
    else
    {
        startVectorTransformed = transform.ConvertToSource(new SizeF(startVector[0]));
        sweepVectorTransformed = transform.ConvertToSource(new SizeF(sweepVector[0]));
    }

    // Simply return the angle between the start and sweep vectors in the target
    // system; the int cast deliberately rounds to a whole degree (original contract).
    return ((int)Math.Round(Vector.SubtendedAngle(sweepVectorTransformed.ToPointF(), PointF.Empty, startVectorTransformed.ToPointF())));
}
/// <summary>
/// Maps a control point to its actual location by applying the current
/// transform (scale, rotate, translate) about the supplied centre point.
/// </summary>
private PointF GetActualPointFromControlPoint(PointF point, PointF center)
{
    var result = point.Clone();
    var t = Transform;

    // Shift into centre-relative coordinates before scaling/rotating.
    result.Translate(-center.X, -center.Y);
    result.Scale(t.ScaleX, t.ScaleY);
    result.Rotate(t.Angle);
    result.Translate(t.TranslateX, t.TranslateY);

    // Shift back out of centre-relative coordinates.
    result.Translate(center.X, center.Y);
    return (result);
}
/// <summary>
/// Maps a control point to its actual location by applying the current
/// transform (scale, rotate, translate) about <c>ControlCenter</c>.
/// </summary>
private PointF GetActualPoint(PointF controlPoint)
{
    var center = ControlCenter;
    var t = Transform;
    var result = controlPoint.Clone();

    // Shift into centre-relative coordinates before scaling/rotating.
    result.Translate(-center.X, -center.Y);
    result.Scale(t.ScaleX, t.ScaleY);
    result.Rotate(t.Angle);
    result.Translate(t.TranslateX, t.TranslateY);

    // Shift back out of centre-relative coordinates.
    result.Translate(center.X, center.Y);
    return (result);
}
/// <summary>
/// Exercises CUDA brute-force Hamming matching: extracts FAST/BRIEF features
/// from "box.png" and "box_in_scene.png", matches them on the GPU, estimates a
/// homography from the filtered matches, and draws the projected outline.
/// Does nothing when no CUDA device is available.
/// </summary>
public void TestBruteForceHammingDistance()
{
    // Only meaningful on a CUDA-capable machine; otherwise skip silently.
    if (CudaInvoke.HasCuda)
    {
        Image<Gray, byte> box = new Image<Gray, byte>("box.png");
        FastDetector fast = new FastDetector(100, true);
        BriefDescriptorExtractor brief = new BriefDescriptorExtractor(32);

        #region extract features from the object image
        Stopwatch stopwatch = Stopwatch.StartNew();
        VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint();
        fast.DetectRaw(box, modelKeypoints);
        Mat modelDescriptors = new Mat();
        brief.Compute(box, modelKeypoints, modelDescriptors);
        stopwatch.Stop();
        Trace.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
        #endregion

        Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");

        #region extract features from the observed image
        stopwatch.Reset();
        stopwatch.Start();
        VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
        fast.DetectRaw(observedImage, observedKeypoints);
        Mat observedDescriptors = new Mat();
        brief.Compute(observedImage, observedKeypoints, observedDescriptors);
        stopwatch.Stop();
        Trace.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
        #endregion

        HomographyMatrix homography = null;
        using (GpuMat<Byte> gpuModelDescriptors = new GpuMat<byte>(modelDescriptors)) //initialization of GPU code might took longer time.
        {
            stopwatch.Reset();
            stopwatch.Start();
            CudaBruteForceMatcher hammingMatcher = new CudaBruteForceMatcher(DistanceType.Hamming);

            //BruteForceMatcher hammingMatcher = new BruteForceMatcher(BruteForceMatcher.DistanceType.Hamming, modelDescriptors);
            // k = 2 nearest neighbours so the ratio test in VoteForUniqueness applies.
            int k = 2;
            Matrix<int> trainIdx = new Matrix<int>(observedKeypoints.Size, k);
            Matrix<float> distance = new Matrix<float>(trainIdx.Size);

            using (GpuMat<Byte> gpuObservedDescriptors = new GpuMat<byte>(observedDescriptors))
            //using (GpuMat<int> gpuTrainIdx = new GpuMat<int>(trainIdx.Rows, trainIdx.Cols, 1, true))
            //using (GpuMat<float> gpuDistance = new GpuMat<float>(distance.Rows, distance.Cols, 1, true))
            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Stopwatch w2 = Stopwatch.StartNew();
                hammingMatcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);
                w2.Stop();
                Trace.WriteLine(String.Format("Time for feature matching (excluding data transfer): {0} milli-sec", w2.ElapsedMilliseconds));
                //gpuTrainIdx.Download(trainIdx);
                //gpuDistance.Download(distance);

                // Filter matches: uniqueness (ratio) test, then size/orientation
                // voting, then RANSAC homography when >= 4 candidates survive.
                Matrix<Byte> mask = new Matrix<byte>(distance.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints, matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints, observedKeypoints, matches, mask, 2);
                    }
                    nonZeroCount = CvInvoke.CountNonZero(mask);
                }
                stopwatch.Stop();
                Trace.WriteLine(String.Format("Time for feature matching (including data transfer): {0} milli-sec", stopwatch.ElapsedMilliseconds));
            }
        }

        if (homography != null)
        {
            // Project the model image's corner points into the observed image.
            Rectangle rect = box.ROI;
            PointF[] pts = new PointF[] {
                new PointF(rect.Left, rect.Bottom),
                new PointF(rect.Right, rect.Bottom),
                new PointF(rect.Right, rect.Top),
                new PointF(rect.Left, rect.Top)
            };
            PointF[] points = pts.Clone() as PointF[];
            homography.ProjectPoints(points);

            //Merge the object image and the observed image into one big image for display
            Image<Gray, Byte> res = box.ConcateVertical(observedImage);

            // Shift the projected corners down by the model image height so they
            // land on the observed-image half of the stacked result.
            for (int i = 0; i < points.Length; i++)
            {
                points[i].Y += box.Height;
            }
            res.DrawPolyline(Array.ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
            //ImageViewer.Show(res);
        }
    }
}
/// <summary>
/// Transforms an array of points by the given matrix. Does NOT modify the
/// input array; the transformed points are returned in a new array.
/// </summary>
/// <param name="pts">Points to transform (left untouched).</param>
/// <param name="matrix">Transformation to apply.</param>
/// <returns>A new array containing the transformed points.</returns>
public static PointF[] TransformPoints(PointF[] pts, Matrix matrix)
{
    // Work on a copy because Matrix.TransformPoints transforms in place.
    // (A dead '#if false' WPF-based implementation was removed here; it used the
    // System.Windows.Media API and could not compile against this GDI+ Matrix.)
    PointF[] xformedPts = (PointF[])pts.Clone();
    matrix.TransformPoints(xformedPts);
    return xformedPts;
}
/// <summary>
/// End-to-end feature matching test: extracts keypoints/descriptors from
/// "box.png" and "box_in_scene.png" using the supplied detector and descriptor
/// generator (or a single combined Feature2D when both arguments are the same
/// instance), brute-force matches them, estimates a homography, and draws the
/// projected model outline onto a stacked display image.
/// </summary>
/// <returns>true if a homography was successfully estimated; false otherwise.</returns>
public static bool TestFeature2DTracker(Feature2D keyPointDetector, Feature2D descriptorGenerator)
{
    //for (int k = 0; k < 1; k++)
    {
        // When the same object detects and describes, use its combined
        // DetectAndCompute path instead of two separate calls.
        Feature2D feature2D = null;
        if (keyPointDetector == descriptorGenerator)
        {
            feature2D = keyPointDetector as Feature2D;
        }

        Mat modelImage = EmguAssert.LoadMat("box.png");
        //Image<Gray, Byte> modelImage = new Image<Gray, byte>("stop.jpg");
        //modelImage = modelImage.Resize(400, 400, true);
        //modelImage._EqualizeHist();

        #region extract features from the object image
        Stopwatch stopwatch = Stopwatch.StartNew();
        VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint();
        Mat modelDescriptors = new Mat();
        if (feature2D != null)
        {
            feature2D.DetectAndCompute(modelImage, null, modelKeypoints, modelDescriptors, false);
        }
        else
        {
            keyPointDetector.DetectRaw(modelImage, modelKeypoints);
            descriptorGenerator.Compute(modelImage, modelKeypoints, modelDescriptors);
        }
        stopwatch.Stop();
        EmguAssert.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
        #endregion

        //Image<Gray, Byte> observedImage = new Image<Gray, byte>("traffic.jpg");
        Image<Gray, Byte> observedImage = EmguAssert.LoadImage<Gray, byte>("box_in_scene.png");
        //Image<Gray, Byte> observedImage = modelImage.Rotate(45, new Gray(0.0));
        //image = image.Resize(400, 400, true);
        //observedImage._EqualizeHist();

        #region extract features from the observed image
        stopwatch.Reset();
        stopwatch.Start();
        VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
        using (Mat observedDescriptors = new Mat())
        {
            if (feature2D != null)
            {
                feature2D.DetectAndCompute(observedImage, null, observedKeypoints, observedDescriptors, false);
            }
            else
            {
                keyPointDetector.DetectRaw(observedImage, observedKeypoints);
                descriptorGenerator.Compute(observedImage, observedKeypoints, observedDescriptors);
            }
            stopwatch.Stop();
            EmguAssert.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
            #endregion

            //Merge the object image and the observed image into one big image for display
            Image<Gray, Byte> res = modelImage.ToImage<Gray, Byte>().ConcateVertical(observedImage);

            // Corner points of the model image, to be projected through the homography.
            Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
            PointF[] pts = new PointF[] {
                new PointF(rect.Left, rect.Bottom),
                new PointF(rect.Right, rect.Bottom),
                new PointF(rect.Right, rect.Top),
                new PointF(rect.Left, rect.Top)
            };

            Mat homography = null;
            stopwatch.Reset();
            stopwatch.Start();

            // Hamming distance for binary descriptors (8-bit), L2 otherwise.
            int k = 2;
            DistanceType dt = modelDescriptors.Depth == CvEnum.DepthType.Cv8U ? DistanceType.Hamming : DistanceType.L2;
            //using (Matrix<int> indices = new Matrix<int>(observedDescriptors.Rows, k))
            //using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            using (BFMatcher matcher = new BFMatcher(dt))
            {
                //ParamDef[] parameterDefs = matcher.GetParams();
                matcher.Add(modelDescriptors);
                matcher.KnnMatch(observedDescriptors, matches, k, null);

                // Filter matches: uniqueness (ratio) test, then size/orientation
                // voting, then RANSAC homography when >= 4 candidates survive.
                Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                //mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints, matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints, observedKeypoints, matches, mask, 2);
                    }
                }
            }
            stopwatch.Stop();
            EmguAssert.WriteLine(String.Format("Time for feature matching: {0} milli-sec", stopwatch.ElapsedMilliseconds));

            bool success = false;
            if (homography != null)
            {
                // Project the model corners and shift them down onto the
                // observed-image half of the stacked display image.
                PointF[] points = pts.Clone() as PointF[];
                points = CvInvoke.PerspectiveTransform(points, homography);
                //homography.ProjectPoints(points);

                for (int i = 0; i < points.Length; i++)
                {
                    points[i].Y += modelImage.Height;
                }
                res.DrawPolyline(
#if NETFX_CORE
                    Extensions.
#else
                    Array.
#endif
                    ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
                success = true;
            }
            //Emgu.CV.UI.ImageViewer.Show(res);
            return (success);
        }
        /*
         * stopwatch.Reset(); stopwatch.Start();
         * //set the initial region to be the whole image
         * using (Image<Gray, Single> priorMask = new Image<Gray, float>(observedImage.Size))
         * {
         *    priorMask.SetValue(1.0);
         *    homography = tracker.CamShiftTrack(
         *       observedFeatures,
         *       (RectangleF)observedImage.ROI,
         *       priorMask);
         * }
         * Trace.WriteLine(String.Format("Time for feature tracking: {0} milli-sec", stopwatch.ElapsedMilliseconds));
         *
         * if (homography != null) //set the initial tracking window to be the whole image
         * {
         *    PointF[] points = pts.Clone() as PointF[];
         *    homography.ProjectPoints(points);
         *
         *    for (int i = 0; i < points.Length; i++)
         *       points[i].Y += modelImage.Height;
         *    res.DrawPolyline(Array.ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
         *    return true;
         * }
         * else
         * {
         *    return false;
         * }*/
    }
}
/// <summary>
/// Renders the currently selected SVG graphs onto the map tiles they overlap
/// and returns the affected tiles. When <paramref name="update"/> is true the
/// rendered tiles are pushed back into the map view; otherwise they are saved
/// as PNG files under the application's "png" folder.
/// </summary>
private IList getImages(bool update)
{
    IList retlist = new ArrayList();
    int nWidth = mapview.PicWidth;
    // Tiles keyed by picture URL so each tile image is produced only once.
    Dictionary<string, ClassImage> table = new Dictionary<string, ClassImage>();
    foreach (IGraph graph in documentControl1.SVGDocument.SelectCollection)
    {
        if (graph != null)
        {
            if (graph is ItopVector.Core.Figure.Line)
            {
                // --- Line graphs: hit-test the segment against each candidate tile. ---
                Line ll = (graph as Line);
                PointF[] pts = (PointF[])ll.Points.Clone();
                graph.Transform.Matrix.TransformPoints(pts);
                documentControl1.DrawArea.PointToSystem(pts);
                //PointF pf2 = ll.CenterPoint;
                //pf2 = documentControl1.DrawArea.PointToSystem(pf2);
                PointF pf3 = ll.CenterPoint;// new PointF((pts[0].X + pts[1].X) / 2, (pts[1].Y + pts[1].Y) / 2);
                //this.documentControl1.SetToolTip(LineAndRect(pts[0],pts[1],new RectangleF(100,100,100,100)).ToString());
                int width = Convert.ToInt32(Math.Abs(pts[0].X - pts[1].X));
                int height = Convert.ToInt32(Math.Abs(pts[0].Y - pts[1].Y));
                IList<ClassImage> list = mapview.GetMapList(width, height, new Point((int)pf3.X, (int)pf3.Y));
                StringBuilder sb = new StringBuilder();
                // Shift the segment so its bounding-box origin becomes (0, 0).
                Point off1 = new Point((int)Math.Min(pts[0].X, pts[1].X), (int)Math.Min(pts[0].Y, pts[1].Y));
                pts[0].X -= off1.X;
                pts[0].Y -= off1.Y;
                pts[1].X -= off1.X;
                pts[1].Y -= off1.Y;
                //GraphicsPath gp = graph.GPath.Clone() as GraphicsPath;
                //using (Matrix matrix1 = graph.Transform.Matrix.Clone())
                //{
                //    matrix1.Multiply(graph.GraphTransform.Matrix);
                //    matrix1.Translate(-off1.X, -off1.Y);
                //    gp.Transform(matrix1);
                //}
                Graphics g = documentControl1.CreateGraphics();
                foreach (ClassImage mc in list)
                {
                    Rectangle rt = mc.Bounds;
                    //rt.Location = documentControl1.DrawArea.PointToView(rt.Location);
                    //rt.Location.Offset(off1);
                    string pic = mc.PicUrl;
                    if (LineAndRect(pts[0], pts[1], rt))
                    {
                        sb.AppendLine(string.Format("{0}_{1}_{2}", mc.PicUrl, rt.Left, rt.Top));
                        // Reuse the cached tile image if this URL was already rendered.
                        if (table.ContainsKey(pic))
                        {
                            mc.PicImage = table[pic].PicImage;
                            table[pic] = mc;
                        }
                        else
                        {
                            table.Add(pic, mc);
                        }
                        System.Drawing.Image image = new Bitmap(nWidth, nWidth);
                        Graphics g1 = Graphics.FromImage(image);
                        g1.Clear(Color.White);
                        if (mc.PicImage == null)
                        {
                        }
                        else
                        {
                            //g = Graphics.FromImage(mc.PicImage);
                            g1.DrawImage(mc.PicImage, 0, 0, nWidth, nWidth);
                        }
                        // Re-target the graph's transform so the line draws at the
                        // correct position within this tile.
                        Matrix matrix2 = graph.GraphTransform.Matrix;
                        PointF pf22 = off1;
                        pf22.X += mc.Left;
                        pf22.Y += mc.Top;
                        pf22 = documentControl1.DrawArea.PointToView(pf22);
                        matrix2.Translate(-matrix2.OffsetX, -matrix2.OffsetY, MatrixOrder.Append);
                        //Matrix matrix2
                        matrix2.Translate(-pf22.X * documentControl1.ScaleRatio, -pf22.Y * documentControl1.ScaleRatio, MatrixOrder.Append);
                        //matrix2.Translate(
                        //g1.TranslateTransform(pf22.X, pf22.Y);
                        //graph.GraphTransform.Matrix=
                        graph.Draw(g1, 0);
                        mc.PicImage = image;
                        g1.Dispose();
                        image.Save(Application.StartupPath + "\\png\\" + mc.PicUrl.Replace('/', '~'), ImageFormat.Png);
                    }
                }
                //this.documentControl1.SetToolTip(sb.ToString());
            }
            else if (!(graph is SVG))
            {
                // --- Other graphs: intersect the graph's path with each tile region. ---
                RectangleF rf = graph.GetBounds();
                GraphicsPath path2 = new GraphicsPath();
                path2.AddRectangle(rf);
                if (!(graph is Use))
                {
                    rf = graph.GPath.GetBounds(graph.Transform.Matrix);
                }
                //graph.Transform.Matrix.TransformPoints(pts);
                PointF[] pts = new PointF[2] { rf.Location, new PointF(rf.Right, rf.Bottom) };
                PointF[] pts2 = (PointF[])pts.Clone();
                documentControl1.DrawArea.PointToSystem(pts);
                PointF pf3 = graph.CenterPoint;
                int width = Convert.ToInt32(Math.Abs(pts[0].X - pts[1].X));
                int height = Convert.ToInt32(Math.Abs(pts[0].Y - pts[1].Y));
                IList<ClassImage> list = mapview.GetMapList(width, height, new Point((int)pf3.X, (int)pf3.Y));
                StringBuilder sb = new StringBuilder();
                PointF off1 = pts[0];
                // Transform a copy of the graph path into tile-local coordinates
                // for the region-intersection hit test below.
                GraphicsPath gp = graph.GPath.Clone() as GraphicsPath;
                using (Matrix matrix1 = graph.Transform.Matrix.Clone())
                {
                    matrix1.Multiply(documentControl1.DrawArea.CoordTransform, MatrixOrder.Append);
                    matrix1.Translate(-off1.X, -off1.Y, MatrixOrder.Append);
                    gp.Transform(matrix1);
                }
                Graphics g = documentControl1.CreateGraphics();
                Graphics g1 = null;
                foreach (ClassImage mc in list)
                {
                    string pic = mc.PicUrl;
                    RectangleF rt = mc.Bounds;
                    Region r = new Region(rt);
                    rt = r.GetBounds(g);
                    r.Intersect(gp);
                    if (!r.IsEmpty(g))
                    {
                        // Reuse the cached tile image if this URL was already rendered.
                        if (table.ContainsKey(pic))
                        {
                            mc.PicImage = table[pic].PicImage;
                            table[pic] = mc;
                        }
                        else
                        {
                            table.Add(pic, mc);
                        }
                        sb.AppendLine(string.Format("{0}_{1}_{2}", mc.PicUrl, rt.Left, rt.Top));
                        System.Drawing.Image image = new Bitmap(nWidth, nWidth);
                        g1 = Graphics.FromImage(image);
                        g1.Clear(Color.White);
                        if (mc.PicImage == null)
                        {
                        }
                        else
                        {
                            //g = Graphics.FromImage(mc.PicImage);
                            g1.DrawImage(mc.PicImage, 0, 0, nWidth, nWidth);
                        }
                        Matrix matrix2 = new Matrix();
                        matrix2.Multiply(documentControl1.DrawArea.CoordTransform);
                        PointF pf22 = off1;
                        if (graph is Text || graph is Use) // special handling for text
                        {
                            matrix2 = graph.GraphTransform.Matrix;
                            if (graph.LimitSize) // text with a fixed (zoom-independent) size
                            {
                                float f1 = graph.Transform.Matrix.Elements[0] - 1;
                                matrix2 = graph.Transform.Matrix.Clone();
                                matrix2.Invert();
                                // Temporarily clear LimitSize while computing the offset.
                                graph.LimitSize = false;
                                if (graph is Text)
                                {
                                    matrix2.Translate(
                                        -mc.Left - pts2[0].X - rf.Width * f1 / 2 + graph.Transform.Matrix.OffsetX,
                                        -mc.Top - pts2[0].Y - rf.Height * f1 / 2 + graph.Transform.Matrix.OffsetY,
                                        MatrixOrder.Append);
                                }
                                else
                                {
                                    matrix2.Translate(
                                        -mc.Left - pts2[0].X - rf.Width / 2 + graph.Transform.Matrix.OffsetX,
                                        -mc.Top - pts2[0].Y - rf.Width / 2 + graph.Transform.Matrix.OffsetY,
                                        MatrixOrder.Append);
                                }
                                graph.LimitSize = true;
                                graph.GraphTransform.Matrix = matrix2;
                            }
                            else
                            {
                                pf22.X += mc.Left;
                                pf22.Y += mc.Top;
                                pf22 = documentControl1.DrawArea.PointToView(pf22);
                                matrix2.Translate(-matrix2.OffsetX, -matrix2.OffsetY, MatrixOrder.Append);
                                matrix2.Translate(-pf22.X * documentControl1.ScaleRatio, -pf22.Y * documentControl1.ScaleRatio, MatrixOrder.Append);
                            }
                        }
                        else
                        {
                            pf22.X += mc.Left;
                            pf22.Y += mc.Top;
                            pf22 = documentControl1.DrawArea.PointToView(pf22);
                            matrix2.Translate(-matrix2.OffsetX, -matrix2.OffsetY, MatrixOrder.Append);
                            matrix2.Translate(-pf22.X * documentControl1.ScaleRatio, -pf22.Y * documentControl1.ScaleRatio, MatrixOrder.Append);
                        }
                        graph.GraphTransform.Matrix = matrix2;
                        graph.Draw(g1, 0);
                        mc.PicImage = image;
                        g1.Dispose();
                        if (update)
                        {
                            mapview.SetImage(mc);
                        }
                        else
                        {
                            image.Save(Application.StartupPath + "\\png\\" + mc.PicUrl.Replace('/', '~'), ImageFormat.Png);
                        }
                    }
                }
                g.Dispose();
                GC.Collect();
                //this.documentControl1.SetToolTip(sb.ToString());
            }
        }
    }
    foreach (string key in table.Keys)
    {
        retlist.Add(table[key]);
    }
    return (retlist);
}
/// <summary>
/// Computes the effective animated value of an element attribute by combining
/// the element's registered animation values (additively or by replacement,
/// per the ValueAdds flags) with the original value. Supports matrix, number,
/// string/link, color, path, and point-list attribute types.
/// (Decompiled control flow with goto labels preserved; only the defective
/// Array.Copy destinations and a null-check ordering were corrected.)
/// </summary>
/// <param name="element">Element whose animations are evaluated.</param>
/// <param name="attributename">Attribute to compute.</param>
/// <param name="domtype">DOM type of the attribute's value.</param>
/// <param name="orivalue">The attribute's original (un-animated) value.</param>
/// <returns>The combined value, whose runtime type depends on <paramref name="domtype"/>.</returns>
public static object GetAnimateValue(SvgElement element, string attributename, DomType domtype, object orivalue)
{
    PointF[] tfArray6;
    PointF[] tfArray7;
    PointF[] tfArray8;
    int num8;
    Matrix matrix1 = new Matrix();
    string text1 = string.Empty;
    GraphicsPath path1 = null;
    string text2 = string.Empty;
    PointF[] tfArray1 = null;
    bool flag1 = true;
    if (element.AnimateNameValues.ContainsKey(attributename))
    {
        AnimateInfo info1 = (AnimateInfo)element.AnimateNameValues[attributename];
        object[] objArray1 = info1.AnimateValues;
        bool[] flagArray1 = info1.ValueAdds;
        // NOTE(review): num1 is never incremented, so flag2 below always reads
        // flagArray1[0] — looks like a decompiler artifact; confirm against the
        // original source before changing.
        int num1 = 0;
        if ((domtype == DomType.SvgString) || (domtype == DomType.SvgLink))
        {
            // Strings/links: the last non-empty animation value wins outright.
            for (int num2 = objArray1.Length - 1; num2 >= 0; num2--)
            {
                if ((objArray1[num2] is string) && (objArray1[num2].ToString() != string.Empty))
                {
                    if (element is ItopVector.Core.Figure.Image)
                    {
                        ((ItopVector.Core.Figure.Image)element).RefImage = ImageFunc.GetImageForURL(objArray1[num2].ToString(), element);
                    }
                    return (objArray1[num2].ToString());
                }
            }
            return (orivalue);
        }
        // Fold every animation value into the per-type accumulator
        // (matrix1 / text1 / text2 / path1 / tfArray1).
        object[] objArray2 = objArray1;
        for (int num10 = 0; num10 < objArray2.Length; num10++)
        {
            PointF[] tfArray2;
            float single3;
            GraphicsPath path2;
            PointF[] tfArray3;
            PointF[] tfArray4;
            PointF[] tfArray5;
            object obj1 = objArray2[num10];
            bool flag2 = flagArray1[num1];
            switch (domtype)
            {
                case DomType.SvgMatrix:
                {
                    Matrix matrix2 = new Matrix();
                    if ((obj1 != null) && (obj1.ToString() != string.Empty))
                    {
                        matrix2 = ((Matrix)obj1).Clone();
                    }
                    if (flag2)
                    {
                        matrix1.Multiply(matrix2);
                        goto Label_046F;
                    }
                    matrix1 = matrix2;
                    goto Label_046F;
                }
                case DomType.SvgNumber:
                {
                    single3 = 0f;
                    if ((obj1 != null) && (obj1.ToString() != string.Empty))
                    {
                        single3 = (float)obj1;
                        if (!flag2 || (text1 == string.Empty))
                        {
                            goto Label_0246;
                        }
                        float single9 = float.Parse(text1) + single3;
                        text1 = single9.ToString();
                    }
                    goto Label_046F;
                }
                case DomType.SvgString:
                {
                    goto Label_046F;
                }
                case DomType.SvgColor:
                {
                    string text3 = string.Empty;
                    if ((obj1 != null) && (obj1.ToString() != string.Empty))
                    {
                        text3 = (string)obj1;
                    }
                    if (text3 != string.Empty)
                    {
                        // Additive colors are blended channel-by-channel (average),
                        // except when either side is a url() paint server reference.
                        if ((flag2 && (text2 != string.Empty)) && (!text2.Trim().StartsWith("url") && !text3.Trim().StartsWith("url")))
                        {
                            Color color1 = ColorFunc.ParseColor(text3);
                            Color color2 = ColorFunc.ParseColor(text2);
                            int num4 = (color1.R + color2.R) / 2;
                            int num5 = (color1.G + color2.G) / 2;
                            int num6 = (color1.B + color2.B) / 2;
                            string[] textArray1 = new string[7] { "rgb(", num4.ToString(), ",", num5.ToString(), ",", num6.ToString(), ")" };
                            text2 = string.Concat(textArray1);
                            goto Label_046F;
                        }
                        text2 = text3;
                    }
                    goto Label_046F;
                }
                case DomType.SvgPath:
                {
                    if ((obj1 != null) && (obj1.ToString() != string.Empty))
                    {
                        path2 = (GraphicsPath)obj1;
                        if (!flag2 || (path1 == null))
                        {
                            goto Label_0460;
                        }
                        tfArray3 = path2.PathPoints;
                        tfArray4 = path1.PathPoints;
                        if (tfArray3.Length == tfArray4.Length)
                        {
                            goto Label_03B5;
                        }
                    }
                    goto Label_046F;
                }
                case DomType.SvgPoints:
                {
                    tfArray2 = new PointF[0];
                    if (obj1 is PointF[])
                    {
                        tfArray2 = (PointF[])obj1;
                    }
                    if (!flag2)
                    {
                        // Non-additive: replace the accumulator (falls through to the Clone below).
                        break;
                    }
                    // Additive: average matching points pairwise.
                    if (tfArray1.Length == tfArray2.Length)
                    {
                        for (int num3 = 0; num3 < tfArray2.Length; num3++)
                        {
                            PointF tf1 = tfArray1[num3];
                            PointF tf2 = tfArray2[num3];
                            float single1 = (tf1.X + tf2.X) / 2f;
                            float single2 = (tf1.Y + tf2.Y) / 2f;
                            tfArray1[num3] = new PointF(single1, single2);
                        }
                    }
                    goto Label_046F;
                }
                default:
                {
                    goto Label_046F;
                }
            }
            tfArray1 = (PointF[])tfArray2.Clone();
            goto Label_046F;
        Label_0246:
            text1 = single3.ToString();
            goto Label_046F;
        Label_03B5:
            // Additive path: sum corresponding path points.
            tfArray5 = new PointF[tfArray4.Length];
            // FIX: copy into the destination array tfArray5; the original copied
            // into tfArray1, which is always null for SvgPath (ArgumentNullException).
            Array.Copy(tfArray3, tfArray5, tfArray5.Length);
            byte[] buffer1 = path2.PathTypes;   // unused (decompiler artifact), kept for identical side effects
            byte[] buffer2 = path1.PathTypes;   // unused (decompiler artifact), kept for identical side effects
            for (int num7 = 0; num7 < Math.Min(tfArray3.Length, tfArray4.Length); num7++)
            {
                PointF tf3 = tfArray3[num7];
                PointF tf4 = tfArray4[num7];
                float single4 = tf3.X + tf4.X;
                float single5 = tf3.Y + tf4.Y;
                tfArray5[num7] = new PointF(single4, single5);
            }
            path1 = new GraphicsPath(tfArray5, path2.PathTypes);
            goto Label_046D;
        Label_0460:
            path1 = (GraphicsPath)path2.Clone();
        Label_046D:;
        Label_046F:;
        }
        // The last ValueAdds flag decides whether the result is combined with orivalue.
        if (flagArray1.Length > 0)
        {
            flag1 = flagArray1[flagArray1.Length - 1];
        }
    }
    // Combine the accumulated animation result with the original value.
    switch (domtype)
    {
        case DomType.SvgMatrix:
        {
            Matrix matrix3 = new Matrix();
            if (orivalue != null)
            {
                matrix3 = ((Matrix)orivalue).Clone();
            }
            if (flag1)
            {
                matrix3.Multiply(matrix1);
            }
            else
            {
                matrix3 = matrix1.Clone();
            }
            return (matrix3.Clone());
        }
        case DomType.SvgNumber:
        {
            if ((flag1 && (orivalue != null)) && (orivalue.ToString() != string.Empty))
            {
                float single6 = (float)orivalue;
                if (text1 == string.Empty)
                {
                    text1 = single6.ToString();
                    break;
                }
                float single10 = float.Parse(text1) + single6;
                text1 = single10.ToString();
            }
            break;
        }
        case DomType.SvgString:
        {
            return (orivalue);
        }
        case DomType.SvgColor:
        {
            if (text2 == string.Empty)
            {
                return (orivalue);
            }
            if ((flag1 && (orivalue != null)) && (!text2.Trim().StartsWith("url") && !((string)orivalue).Trim().StartsWith("url")))
            {
                Color color3 = ColorFunc.ParseColor((string)orivalue);
                Color color4 = ColorFunc.ParseColor(text2);
                string[] textArray2 = new string[7];
                textArray2[0] = "rgb(";
                int num11 = (color3.R + color4.R) / 2;
                textArray2[1] = num11.ToString();
                textArray2[2] = ",";
                int num12 = (color3.G + color4.G) / 2;
                textArray2[3] = num12.ToString();
                textArray2[4] = ",";
                int num13 = (color3.B + color4.B) / 2;
                textArray2[5] = num13.ToString();
                textArray2[6] = ")";
                text2 = string.Concat(textArray2);
            }
            return (text2);
        }
        case DomType.SvgPath:
        {
            if (path1 == null)
            {
                return (orivalue);
            }
            if (!flag1 || (orivalue == null))
            {
                return (path1);
            }
            tfArray6 = ((GraphicsPath)orivalue).PathPoints;
            tfArray7 = path1.PathPoints;
            tfArray8 = new PointF[tfArray6.Length];
            // FIX: copy into tfArray8 (the array being built); the original copied
            // into tfArray1, which is always null here, and left tfArray8 zeroed.
            Array.Copy(tfArray6, tfArray8, tfArray8.Length);
            num8 = 0;
            goto Label_0738;
        }
        case DomType.SvgPoints:
        {
            if (tfArray1.Length > 0)
            {
                PointF[] tfArray9 = new PointF[0];
                if (!(orivalue is PointF[]) || !flag1)
                {
                    return (tfArray1);
                }
                tfArray9 = (PointF[])orivalue;
                if (tfArray9.Length != tfArray1.Length)
                {
                    return (tfArray1);
                }
                for (int num9 = 0; num9 < tfArray1.Length; num9++)
                {
                    tfArray1[num9] = new PointF((tfArray1[num9].X + tfArray9[num9].X) / 2f, (tfArray1[num9].Y + tfArray9[num9].Y) / 2f);
                }
            }
            return (tfArray1);
        }
        default:
        {
            return (string.Empty);
        }
    }
    if (text1 != string.Empty)
    {
        return (float.Parse(text1));
    }
    // FIX: test for null before calling ToString() (original order dereferenced first).
    if ((orivalue == null) || (orivalue.ToString() == string.Empty))
    {
        return ((float)AttributeFunc.GetDefaultValue(element, attributename));
    }
    return ((float)orivalue);
Label_0738:
    // Additive path combine: sum the original and animated path points pairwise.
    if (num8 >= Math.Min(tfArray6.Length, tfArray7.Length))
    {
        return (new GraphicsPath(tfArray8, path1.PathTypes));
    }
    PointF tf5 = tfArray6[num8];
    PointF tf6 = tfArray7[num8];
    float single7 = tf5.X + tf6.X;
    float single8 = tf5.Y + tf6.Y;
    tfArray8[num8] = new PointF(single7, single8);
    num8++;
    goto Label_0738;
}
// Constructor.
// Takes a defensive copy of the vertices and builds the closed outline by
// appending the first vertex again at the end.
public PolygonPathObject(PointF[] points)
{
    this.points = (PointF[])points.Clone();

    int n = this.points.Length;
    this.pathPoints = new PointF[n + 1];
    Array.Copy(this.points, this.pathPoints, n);
    this.pathPoints[n] = this.pathPoints[0];
}
// Constructor.
// Stores a private copy of the supplied points so later mutation by the
// caller cannot affect this path object.
public LinesPathObject(PointF[] points)
{
    this.points = (PointF[])points.Clone();
}
// Constructor.
// Captures a defensive copy of the control points together with the curve
// parameters, then precomputes the resulting path points.
public CurvePathObject(PointF[] points, int offset, int numberOfSegments, float tension)
{
    this.points = (PointF[])points.Clone();
    this.tension = tension;
    this.offset = offset;
    this.numberOfSegments = numberOfSegments;

    // Must run after all parameters above are assigned.
    this.pathPoints = ComputePathPoints();
}
// Constructor.
// Keeps a private copy of the control points along with the curve tension.
public ClosedCurvePathObject(PointF[] points, float tension)
{
    this.tension = tension;
    this.points = (PointF[])points.Clone();
}
// Returns a copy of this stop point: the location is cloned, the remaining
// value fields are copied as-is.
public BusStopPoint Clone() =>
    new BusStopPoint(location.Clone(), isVertical, durationInPercent);