public void ShouldAddPathIfNotAlreadyPresent()
{
    // Given: a collection seeded with one path.
    var collection = new PathCollection();
    collection.Add(_upperCaseB);

    // When: a path not yet present is added.
    collection.Add(_upperCaseA);

    // Then: both entries are kept.
    Assert.AreEqual(2, collection.Count);
}
// NOTE(review): the caseSensitive parameter is never forwarded to the collection --
// PathCollection<TPath> is constructed with its default comparer, so expectedCount
// only holds if the default happens to match each theory case. Compare the other
// tests in this file, which pass new PathComparer(caseSensitive) explicitly; confirm
// whether a comparer argument is missing here.
public void ShouldRespectFileSystemCaseSensitivityWhenAddingPath(bool caseSensitive, int expectedCount) { // Given PathCollection <TPath> collection = new PathCollection <TPath>(); collection.Add(_upperCaseA); // When collection.Add(_lowerCaseA); // Then Assert.AreEqual(expectedCount, collection.Count); }
// Rebuilds the start->destination path cache from scratch: clears the cache, then
// for every ordered pair of structures runs A* and stores the result (plus all
// sub-paths when the route has more than two hops) via AddPath.
// NOTE(review): the empty catch swallows NoPathFoundException -- presumably an
// unreachable pair simply gets no cache entry; confirm this best-effort behaviour
// is intended.
private void RecalculateAllPaths() { PathCollection.Clear(); foreach (var start in StructureManager) { if (!PathCollection.ContainsKey(start.Position)) { PathCollection.Add(start.Position, new Dictionary <HexagonNode, Path>()); } foreach (var destination in StructureManager) { try { if (!PathCollection[start.Position].ContainsKey(destination.Position)) { Path path = new Path(PathFinding.AStar(start.Position, destination.Position).ToArray()); AddPath(path); if (path.AllHops.Count > 2) { var containedPaths = path.GetContainedPaths(); foreach (var containedPath in containedPaths) { AddPath(containedPath); } } } } catch (NoPathFoundException <HexagonNode> ) { } } } }
private void AddSelect(string name)
{
    // Only record names that pass validation; invalid ones are silently ignored.
    if (!IsValid(name))
    {
        return;
    }

    _selects.Add(name);
}
/// <summary>
/// Include properties from expand model.
/// </summary>
/// <param name="include">Properties to include.</param>
public void Include(params Expression <Func <TExpandModel, ExpandableEntity> >[] include)
{
    foreach (var expression in include)
    {
        // "x => x.Foo.Bar" renders as "x.Foo.Bar"; drop everything up to and
        // including the first dot to obtain the property path "Foo.Bar".
        string body = expression.Body.ToString();
        int firstDot = body.IndexOf(".", StringComparison.Ordinal);
        string path = body.Remove(0, firstDot + 1);
        _expands.Add(path);
    }
}
/// <summary>
/// Expands the template variables $(FileName), $(Package) and $(FileNameWithPackage)
/// in <paramref name="args"/> using the last file generated from a template.
/// The package name is derived from the closest project classpath, falling back to
/// the globally configured classpaths when the project yields none (or is null).
/// Clears lastFileFromTemplate once processed; lastFileOptions is cleared unless a
/// processOnSwitch is pending.
/// </summary>
public static string ProcessArgs(Project project, string args) { lastFileFromTemplate = QuickGenerator.QuickSettings.GenerateClass.LastFileFromTemplate; lastFileOptions = QuickGenerator.QuickSettings.GenerateClass.LastFileOptions; if (lastFileFromTemplate != null) { string fileName = Path.GetFileNameWithoutExtension(lastFileFromTemplate); args = args.Replace("$(FileName)", fileName); if (args.Contains("$(FileNameWithPackage)") || args.Contains("$(Package)")) { string package = ""; string path = lastFileFromTemplate; // Find closest parent string classpath=""; if(project!=null) classpath = project.AbsoluteClasspaths.GetClosestParent(path); // Can't find parent, look in global classpaths if (classpath == null) { PathCollection globalPaths = new PathCollection(); foreach (string cp in ProjectManager.PluginMain.Settings.GlobalClasspaths) globalPaths.Add(cp); classpath = globalPaths.GetClosestParent(path); } if (classpath != null) { if (project != null) { // Parse package name from path package = Path.GetDirectoryName(ProjectPaths.GetRelativePath(classpath, path)); package = package.Replace(Path.DirectorySeparatorChar, '.'); } } args = args.Replace("$(Package)", package); if (package.Length!=0) args = args.Replace("$(FileNameWithPackage)", package + "." + fileName); else args = args.Replace("$(FileNameWithPackage)", fileName); if (lastFileOptions != null) { args = ProcessFileTemplate(args); if (processOnSwitch == null) lastFileOptions = null; } } lastFileFromTemplate = null; } return args; }
public void Should_Respect_File_System_Case_Sensitivity_When_Adding_DirectoryPath(bool caseSensitive, int expectedCount)
{
    // Given
    var comparer = new PathComparer(caseSensitive);
    var collection = new PathCollection(new DirectoryPath[] { "A" }, comparer);

    // When
    collection.Add(new DirectoryPath("a"));

    // Then
    collection.Count.ShouldBe(expectedCount);
}
public void Should_Add_DirectoryPath_If_Not_Already_Present()
{
    // Given
    var comparer = new PathComparer(false);
    var collection = new PathCollection(new DirectoryPath[] { "A" }, comparer);

    // When
    var added = new DirectoryPath("B");
    collection.Add(added);

    // Then
    collection.Count.ShouldBe(2);
}
public void Should_Respect_File_System_Case_Sensitivity_When_Adding_FilePaths(bool caseSensitive, int expectedCount)
{
    // Given
    var collection = new PathCollection(
        new FilePath[] { "A.TXT", "B.TXT" },
        new PathComparer(caseSensitive));

    // When
    FilePath[] additions = { "a.txt", "b.txt", "c.txt" };
    collection.Add(additions);

    // Then
    collection.Count.ShouldBe(expectedCount);
}
public void Should_Throw_If_Paths_Is_Null()
{
    // Given
    var collection = new PathCollection();

    // When: the typed local selects the IEnumerable<FilePath> overload.
    IEnumerable <FilePath> paths = null;
    var result = Record.Exception(() => collection.Add(paths));

    // Then
    AssertEx.IsArgumentNullException(result, "paths");
}
public void Should_Add_FilePath_If_Not_Already_Present()
{
    // Given
    var comparer = new PathComparer(false);
    var collection = new PathCollection(new FilePath[] { "A.txt" }, comparer);

    // When
    collection.Add(new FilePath("B.txt"));

    // Then
    Assert.Equal(2, collection.Count);
}
public void Should_Respect_File_System_Case_Sensitivity_When_Adding_FilePath(bool caseSensitive, int expectedCount)
{
    // Given
    var collection = new PathCollection(
        new FilePath[] { "A.TXT" },
        new PathComparer(caseSensitive));

    // When
    var candidate = new FilePath("a.txt");
    collection.Add(candidate);

    // Then
    Assert.Equal(expectedCount, collection.Count);
}
public void Should_Add_FilePaths_That_Are_Not_Present()
{
    // Given
    var comparer = new PathComparer(false);
    var collection = new PathCollection(new FilePath[] { "A.txt", "B.txt" }, comparer);

    // When: two duplicates and one new path are added.
    FilePath[] batch = { "A.txt", "B.txt", "C.txt" };
    collection.Add(batch);

    // Then: only the new path increases the count.
    Assert.Equal(3, collection.Count);
}
public void Should_Respect_File_System_Case_Sensitivity_When_Adding_DirectoryPaths(bool caseSensitive, int expectedCount)
{
    // Given
    var collection = new PathCollection(
        new DirectoryPath[] { "A", "B" },
        new PathComparer(caseSensitive));

    // When
    DirectoryPath[] additions = { "a", "b", "c" };
    collection.Add(additions);

    // Then
    Assert.Equal(expectedCount, collection.Count);
}
public void Should_Add_DirectoryPaths_That_Are_Not_Present()
{
    // Given
    var comparer = new PathComparer(false);
    var collection = new PathCollection(new DirectoryPath[] { "A", "B" }, comparer);

    // When: two duplicates and one new directory are added.
    DirectoryPath[] batch = { "A", "B", "C" };
    collection.Add(batch);

    // Then: only the new directory increases the count.
    Assert.Equal(3, collection.Count);
}
private void AddPath(Path path)
{
    // Ensure the start node has an inner destination map.
    if (!PathCollection.ContainsKey(path.Start))
    {
        PathCollection.Add(path.Start, new Dictionary <HexagonNode, Path>());
    }

    // Register the path only if no path to this destination is cached yet;
    // an existing entry is never overwritten.
    var destinations = PathCollection[path.Start];
    if (!destinations.ContainsKey(path.Destination))
    {
        destinations.Add(path.Destination, path);
    }
}
public void Should_Throw_If_Paths_Is_Null()
{
    // Given
    var collection = new PathCollection();

    // When
    var result = Record.Exception(() =>
    {
        collection.Add((IEnumerable <FilePath>)null);
    });

    // Then
    result.ShouldBeOfType <ArgumentNullException>()
        .And().ParamName.ShouldBe("paths");
}
/// <summary>
/// Replaces the template variables $(FileName), $(Package) and $(FileNameWithPackage)
/// in <paramref name="args"/> based on the last file created from a template.
/// When the resolved package is empty, the literal " $(Package)" (with leading space)
/// is stripped before substitution so no dangling placeholder remains.
/// </summary>
// NOTE(review): unlike the static variant of this method elsewhere in this file,
// project.AbsoluteClasspaths is dereferenced without a null check -- confirm callers
// never pass a null project.
public string ProcessArgs(Project project, string args) { if (lastFileFromTemplate != null) { string fileName = Path.GetFileNameWithoutExtension(lastFileFromTemplate); args = args.Replace("$(FileName)", fileName); if (args.Contains("$(FileNameWithPackage)") || args.Contains("$(Package)")) { string package = ""; string path = lastFileFromTemplate; // Find closest parent string classpath = project.AbsoluteClasspaths.GetClosestParent(path); // Can't find parent, look in global classpaths if (classpath == null) { PathCollection globalPaths = new PathCollection(); foreach (string cp in PluginMain.Settings.GlobalClasspaths) { globalPaths.Add(cp); } classpath = globalPaths.GetClosestParent(path); } if (classpath != null) { // Parse package name from path package = Path.GetDirectoryName(ProjectPaths.GetRelativePath(classpath, path)); package = package.Replace(Path.DirectorySeparatorChar, '.'); } if (package == "") { args = args.Replace(" $(Package)", ""); } args = args.Replace("$(Package)", package); if (package != "") { args = args.Replace("$(FileNameWithPackage)", package + "." + fileName); } else { args = args.Replace("$(FileNameWithPackage)", fileName); } } } return(args); }
public void GetFullPath_should_provide_rooted_path_when_relative_directory_is_Added()
{
    // Given: a collection backed by the fake file system (working dir "/working").
    // Removed unused locals `dotNetDir` and `files`: the former also spawned a
    // Process.GetCurrentProcess() lookup whose result was never used.
    var pc = new PathCollection
    {
        FileSystem = new TestFileSystem(),
    };
    pc.Add("relativeDirectory");

    // When: resolving a file name against the added relative directory.
    var path = pc.GetFullPath("fileB");

    // Then: the result is rooted under the fake working directory.
    Assert.Equal("/working/relativeDirectory/fileB", path);
}
// Loads every Path resource from the XML files under Directories.Paths, parses the
// scalar elements plus the nested Points list, and adds each parsed path to aPaths.
// Raises OnCategoryProcessing/OnCategoryProcessed around the batch, checks the
// abort callback after each file, and restores the previous working directory when
// done. Files that fail to load (LoadXml returns null) are skipped silently.
protected override void ProcessResource(PathCollection aPaths) { var files = GetXmlFiles(Directories.Paths); if (files.Any()) { OnCategoryProcessing(ResourceTypes.Paths); var previous = SetCurrentDirectory(Directories.Paths); foreach (var file in files) { var document = LoadXml(file); if (document != null) { var path = new Path() { Name = GetElement(document, "Name").Value, ID = GetElementValue <int>(document, "ID"), BackgroundRoom = GetElementValue <int>(document, "BackgroundRoom"), ConnectionKind = GetElementValue <ConnectionKinds>(document, "ConnectionKind"), Closed = GetElementValue <bool>(document, "Closed"), Precision = GetElementValue <byte>(document, "Precision"), SnapX = GetElementValue <int>(document, "SnapX"), SnapY = GetElementValue <int>(document, "SnapY") }; path.Points.AddRange(from element in document.Element("Points").Elements("Point") select new Path.Point() { X = GetElementValue <double>(element, "X"), Y = GetElementValue <double>(element, "Y"), Speed = GetElementValue <double>(element, "Speed") }); aPaths.Add(path); } OnAbortProcessingCallback(); } OnCategoryProcessed(ResourceTypes.Paths); SetCurrentDirectory(previous); } }
public PathCollection GetCheckedFileNames()
{
    // Collect the checked file names of each list-view group, keyed by group name.
    PathCollection fileList = new PathCollection();
    for (int groupIndex = 0; groupIndex < listViewFiles.Groups.Count; groupIndex++)
    {
        var group = listViewFiles.Groups[groupIndex];
        fileList.Add(group.Name, new SortedSet <string>());
        for (int itemIndex = 0; itemIndex < group.Items.Count; itemIndex++)
        {
            var item = group.Items[itemIndex];
            if (item.Checked)
            {
                // Column 1 holds the file name.
                fileList[group.Name].Add(item.SubItems[1].Text);
            }
        }
    }
    return(fileList);
}
/// <summary>
/// Builds a PathCollection from parallel configuration lists. Each entry pairs a
/// type ("Include"/"Exclude"), a name, a regex, and an optional exclude-parameter.
/// </summary>
/// <exception cref="HmacInvalidConfigException">A type value is neither "Include" nor "Exclude".</exception>
private static PathCollection CreatePathCollection(List <string> types, List <string> names, List <string> regexes, List <string> excludeParameter)
{
    var paths = new PathCollection();
    for (var i = 0; i < types.Count; ++i)
    {
        if (types[i] != "Include" && types[i] != "Exclude")
        {
            throw new HmacInvalidConfigException(
                "AppSetting Dragon.Security.Hmac.PathTypes is invalid, allowed values are: Include, Exclude");
        }
        var type = types[i] == "Include" ? PathConfig.PathType.Include : PathConfig.PathType.Exclude;
        paths.Add(new PathConfig
        {
            Type = type,
            Name = names[i],
            Path = regexes[i],
            // Guard by index rather than Any(): a non-empty excludeParameter list
            // shorter than `types` previously caused an ArgumentOutOfRangeException.
            ExcludeParameters = i < excludeParameter.Count ? excludeParameter[i] : ""
        });
    }
    return paths;
}
/// <summary>
/// Prompts the user for a class name and creates a new ActionScript (.as) class file
/// under <paramref name="inDirectory"/>. The fully qualified class name is derived
/// from the closest project classpath (falling back to global classpaths); the file
/// is written with the host's default encoding/EOL and then opened. Any failure is
/// reported via ErrorHandler.ShowInfo.
/// </summary>
// NOTE(review): the error-message string literal below contains a raw line break
// ("Could not add the \n class: ...") -- likely a source-extraction artifact;
// confirm the intended message text.
public void AddClass(Project project, string inDirectory) { string caption = "Add New Class"; string label = "Class Name:"; string defaultLine = "NewClass"; LineEntryDialog dialog = new LineEntryDialog(caption, label, defaultLine); if (dialog.ShowDialog() == DialogResult.OK) { try { string name = Path.GetFileNameWithoutExtension(dialog.Line); string path = Path.Combine(inDirectory, name + ".as"); OnFileCreated(path); // figure out the best classpath to build from string classpath = project.AbsoluteClasspaths.GetClosestParent(path); // no luck? try the global classpaths! if (classpath == null) { PathCollection globalPaths = new PathCollection(); foreach (string cp in Settings.GlobalClasspaths.Split(';')) { globalPaths.Add(cp); } classpath = globalPaths.GetClosestParent(path); } // still no luck? nothing else we can do if (classpath == null) { throw new Exception("An appropriate project classpath could not be located to create the class from."); } // figure out the full class name cleverly string className = PathHelper.GetRelativePath(classpath, path); className = className.Replace(".as", ""); className = className.Replace(Path.DirectorySeparatorChar, '.'); string constructor = className; int p = className.LastIndexOf('.'); if (p >= 0) { constructor = className.Substring(p + 1); } // MIKA: DETECT EOL AND ENCODING Encoding encoding = this.fdActions.GetDefaultEncoding(); string eolMarker = this.fdActions.GetDefaultEOLMarker(); using (FileStream stream = File.Open(path, FileMode.CreateNew)) { StreamWriter writer = new StreamWriter(stream, encoding); string code = eolMarker; code += "class " + className + eolMarker; code += "{" + eolMarker; code += "\tfunction " + constructor + "()" + eolMarker; code += "\t{" + eolMarker; code += "\t\t"; DocumentSeekRequest = code.Length; code += eolMarker; code += "\t}" + eolMarker; code += "}" + eolMarker; writer.Write(code); writer.Flush(); } OpenFile(path); } catch (Exception exception) { ErrorHandler.ShowInfo("Could not add the 
class: " + exception.Message); } } }
/// <summary>
/// Replaces the template variables $(FileName), $(Package), $(FileNameWithPackage),
/// $(ProName), $(ProRecName) and $(ProSendName) in <paramref name="args"/> based on
/// the last file created from a template. The package name comes from the closest
/// project classpath, with a fallback to the global classpaths.
/// </summary>
// NOTE(review): project.AbsoluteClasspaths is dereferenced without a null check --
// confirm callers never pass a null project.
public string ProcessArgs(Project project, string args) { if (lastFileFromTemplate != null) { string fileName = Path.GetFileNameWithoutExtension(lastFileFromTemplate); args = args.Replace("$(FileName)", fileName); if (args.Contains("$(FileNameWithPackage)") || args.Contains("$(Package)")) { string package = ""; string path = lastFileFromTemplate; // Find closest parent string classpath = project.AbsoluteClasspaths.GetClosestParent(path); // Can't find parent, look in global classpaths if (classpath == null) { PathCollection globalPaths = new PathCollection(); foreach (string cp in ProjectManager.PluginMain.Settings.GlobalClasspaths) globalPaths.Add(cp); classpath = globalPaths.GetClosestParent(path); } if (classpath != null) { // Parse package name from path package = Path.GetDirectoryName(ProjectPaths.GetRelativePath(classpath, path)); package = package.Replace(Path.DirectorySeparatorChar, '.'); } args = args.Replace("$(Package)", package); args = args.Replace("$(ProName)", proName); args = args.Replace("$(ProRecName)", receiveProName); args = args.Replace("$(ProSendName)", sendProName); if (package != "") args = args.Replace("$(FileNameWithPackage)", package + "." + fileName); else args = args.Replace("$(FileNameWithPackage)", fileName); } } return args; }
/// <summary>
/// Computes all paths in the graph between every pair of vertices.
/// The algorithm is a parallelised modification of Floyd's (Floyd-Warshall) algorithm:
/// path sets are iteratively extended through each intermediate vertex m, keeping only
/// simple paths, with prev/next matrices swapped each round (double buffering).
/// </summary>
/// <returns>Paths grouped by path length, keyed by (from, to) vertex pairs.</returns>
public Dictionary <int, PathDictionary> GetAllGraphPaths() { var vertices = new StackListQueue <Vertex>(Vertices); var prev = new PathCollection[vertices.Count, vertices.Count]; var next = new PathCollection[vertices.Count, vertices.Count]; var read = new object(); var write = new object(); Parallel.ForEach( from i in Enumerable.Range(0, vertices.Count) from j in Enumerable.Range(0, vertices.Count) select new[] { i, j }, pair => { int i = pair[0]; int j = pair[1]; lock (write) prev[i, j] = new PathCollection(); lock (write) next[i, j] = new PathCollection(); }); Parallel.ForEach( this, segment => { int i, j; lock (read) i = vertices.IndexOf(segment.First()); lock (read) j = vertices.IndexOf(segment.Last()); lock (write) prev[i, j].Add(new Path(segment)); lock (write) prev[j, i].Add(new Path(segment.GetReverse())); }); for (int m = 0; m < vertices.Count; m++) { Parallel.ForEach( from i in Enumerable.Range(0, vertices.Count) from j in Enumerable.Range(0, vertices.Count) select new[] { i, j }, pair => { PathCollection fromTo; int i = pair[0]; int j = pair[1]; lock (read) fromTo = new PathCollection(prev[i, j]); if (i != m && j != m) { PathCollection fr, to; lock (read) fr = new PathCollection(prev[i, m]); lock (read) to = new PathCollection(prev[m, j]); var passThrow = new PathCollection( from a in fr from b in to select new Path(a) { b.GetRange(1, b.Count - 1) }); passThrow.RemoveAll(path => !Path.IsSimple(new Path(path.GetRange(0, path.Count - 1)))); passThrow.RemoveAll(path => !Path.IsSimple(new Path(path.GetRange(1, path.Count - 1)))); fromTo.Add(passThrow); } lock (write) next[i, j] = fromTo; }); PathCollection[,] t = prev; prev = next; next = t; } var dictionaries = new Dictionary <int, PathDictionary> [vertices.Count, vertices.Count]; Parallel.ForEach( from i in Enumerable.Range(0, vertices.Count) from j in 
Enumerable.Range(0, vertices.Count) select new[] { i, j }, pair => { int i = pair[0]; int j = pair[1]; KeyValuePair <Vertex, Vertex> key; PathCollection collection; lock (read) key = new KeyValuePair <Vertex, Vertex>(vertices.ElementAt(i), vertices.ElementAt(j)); lock (read) collection = prev[i, j]; Dictionary <int, PathDictionary> dictionary1 = Enumerable.Range(2, vertices.Count) .ToDictionary(len => len, len => new PathDictionary(key, new PathCollection(collection.Where(p => p.Count == len)))); lock (write) dictionaries[i, j] = dictionary1; }); var dictionary = new Dictionary <int, PathDictionary>(); foreach (var pair in dictionaries) { foreach (int i in pair.Keys) { if (!dictionary.ContainsKey(i)) { dictionary.Add(i, pair[i]); } else { foreach (var key in pair[i].Keys) { if (!dictionary[i].ContainsKey(key)) { dictionary[i].Add(key, pair[i][key]); } else { dictionary[i][key].AddRange(pair[i][key]); } } } } } return(dictionary); }
// Consolidates a symbol's shape data for serialisation: de-duplicates fills and
// strokes into index tables, records per-segment fill0/fill1/stroke info (Shrec),
// groups contiguous segments with identical Shrecs into PathCollections, splits
// non-contiguous collections, then writes colour/stroke defs, paths and sorted
// shape-record uses to the DVex writer.
// NOTE(review): `shape.Fills[...]` / `shape.Strokes[...]` reference an identifier
// not declared in this method (the loop variable is `sh`) -- verify whether `shape`
// is a field or should read `sh`. Also `fills`/`strokes` are List<T>, whose Add()
// returns void, yet its result is assigned to an int as ArrayList.Add would allow;
// as written this should not compile -- confirm the intended collection types.
private static void ConsolidatePaths(Symbol symbol, DVexWriter writer) { List <FillStyle> fills = new List <FillStyle>(); List <StrokeStyle> strokes = new List <StrokeStyle>(); fills.Add(new SolidFill(Color.Transparent)); strokes.Add(new SolidStroke(0.0F, Color.Transparent)); ArrayList allPaths = new ArrayList(); ArrayList allSrs = new ArrayList(); // Find all used colors/strokes, and the F0,F1,S info for each seg foreach (Shape sh in symbol.Shapes) { foreach (IShapeData s in sh.ShapeData) { int fill = 0; int stroke = 0; if (!fills.Contains(shape.Fills[s.FillIndex])) { fill = fills.Add(shape.Fills[s.FillIndex]); } else { fill = fills.IndexOf(shape.Fills[s.FillIndex]); } if (!strokes.Contains(shape.Strokes[s.StrokeIndex])) { stroke = strokes.Add(shape.Strokes[s.StrokeIndex]); } else { stroke = strokes.IndexOf(shape.Strokes[s.StrokeIndex]); } // break path into shape records foreach (IPathPrimitive ipp in s.Path) { if (ipp is IShapeData) { IShapeData ip = (IShapeData)ipp; if (allPaths.Contains(ip)) { // this must be a fill1 if it is a dup int index = allPaths.IndexOf(ip); Shrec sr = (Shrec)allSrs[index]; Shrec newShrec = new Shrec(0, 0); newShrec.F0 = (sr.F0 == 0) ? fill : sr.F0; newShrec.F1 = (sr.F1 == 0) ? fill : sr.F1; newShrec.S = (sr.S == 0) ? 
stroke : sr.S; allSrs[index] = newShrec; } else { allSrs.Add(new Shrec(fill, stroke)); allPaths.Add(ip); } } } } // end groups } // end shapes // ok, now write out colors // sort fills by rgb, argb, and gradients ArrayList orderedFills = new ArrayList(); ArrayList rgbas = new ArrayList(); ArrayList gfs = new ArrayList(); foreach (Fill sf in fills) { if (sf is SolidFill) { if (((SolidFill)sf).Color.A == 255 || (SolidFill)sf == fills[0]) // 'no fill' { orderedFills.Add(sf); } else { rgbas.Add(sf); } } else if (sf is GradientFill) { gfs.Add(sf); } else { // bitmap fills orderedFills.Add(new SolidFill(Color.Gray)); }; } SolidFill[] wrgbs = new SolidFill[orderedFills.Count]; wrgbs[0] = new SolidFill(Color.FromArgb(255, 0, 0, 0)); int fRgb = 1; foreach (Fill f in orderedFills) { if (f != fills[0]) { wrgbs[fRgb++] = (SolidFill)f; } } int fRgba = 0; SolidFill[] wrgbas = new SolidFill[rgbas.Count]; foreach (Fill f in rgbas) { orderedFills.Add(f); wrgbas[fRgba++] = (SolidFill)f; } int fGr = 0; GradientFill[] wgfs = new GradientFill[gfs.Count]; foreach (Fill f in gfs) { orderedFills.Add(f); wgfs[fGr++] = (GradientFill)(f); } writer.WriteNbitColorDefs(wrgbs); writer.WriteNbitColorDefs(wrgbas); writer.WriteNbitGradientDefs(wgfs); //writer.WriteRgbColorDefs(wrgbs); //writer.WriteRgbaColorDefs(wrgbas); //writer.WriteGradientColorDefs(wgfs); // ok, colors written, now strokes // write out all the stroke defs second // get counts int wrgbCount = 0; int wrgbaCount = 0; foreach (Stroke st in strokes) { if (st.Color.A == 255 || st == strokes[0]) { wrgbCount++; } else { wrgbaCount++; } } // create stroke arrays Stroke[] wsrgbs = new Stroke[wrgbCount]; Stroke[] wsrgbas = new Stroke[wrgbaCount]; int sRgb = 0; int sRgba = 0; foreach (Stroke st in strokes) { if (st.Color.A == 255 || st == strokes[0]) { wsrgbs[sRgb++] = st; } else { wsrgbas[sRgba++] = st; } } // now write the stroke data writer.WriteNbitStrokeDefs(wsrgbs); writer.WriteNbitStrokeDefs(wsrgbas); 
//writer.WriteRgbStrokeDefs(wsrgbs); //writer.WriteRgbaStrokeDefs(wsrgbas); // and now paths // valid pathsegs must have the same F0, F1, and S ArrayList tempPaths = new ArrayList(); ArrayList tempSrsAl = new ArrayList(); PathCollection pc = new PathCollection(); Shrec curShrec = Shrec.Empty; for (int i = 0; i < allSrs.Count; i++) //Shrec sr in srsAl) { Shrec sr = (Shrec)allSrs[i]; if (sr.Equals(curShrec) || curShrec.Equals(Shrec.Empty)) { //add to path pc.Add((IShapeData)allPaths[i]); } else { // write to hash tempPaths.Add(pc); tempSrsAl.Add(curShrec); pc = new PathCollection(); pc.Add((IShapeData)allPaths[i]); } curShrec = sr; } if (!tempSrsAl.Contains(curShrec)) { tempPaths.Add(pc); tempSrsAl.Add(curShrec); } // split non contig paths ArrayList paths = new ArrayList(); ArrayList srsAl = new ArrayList(); foreach (PathCollection pcoll in tempPaths) { //pcoll.ReorderPath(); PathCollection[] pcolls = pcoll.SplitPath(); foreach (PathCollection splitP in pcolls) { paths.Add(splitP); srsAl.Add(tempSrsAl[tempPaths.IndexOf(pcoll)]); //writer.WritePath(splitP.PointSegments); } } IShapeData[][] ips = new IShapeData[paths.Count][]; for (int i = 0; i < paths.Count; i++) { ips[i] = ((PathCollection)paths[i]).PointSegments; } writer.WritePaths(ips); // convert to array Shrec[] srs = new Shrec[srsAl.Count]; for (int i = 0; i < srsAl.Count; i++) { srs[i] = (Shrec)srsAl[i]; } // and finally, uses - must be sorted by fill color // use order Fill1 (no strokes), fill0[stroke], stroke only's // for each fill index{..}, then dangling strokes ArrayList shapeRecords = new ArrayList(); // start at 1 to avoid empty fills foreach (Fill f in orderedFills) { int curFill = fills.IndexOf(f); if (curFill != 0) { // all F1's of this color first ArrayList Fs = new ArrayList(); for (int i = 0; i < srs.Length; i++) { if (srs[i].F0 == curFill) { // add use for F0 ShapeRecord curSr = new ShapeRecord(); curSr.Fill = orderedFills.IndexOf(f); curSr.Stroke = srs[i].S; curSr.Path = i; Fs.Add(curSr); } if 
(srs[i].F1 == curFill) { // add use for F1 ShapeRecord curSr = new ShapeRecord(); curSr.Fill = orderedFills.IndexOf(f); curSr.Stroke = 0; curSr.Path = i; Fs.Add(curSr); } } //now sort the F1s from tip to tail if (Fs.Count > 0) { ArrayList finalFs = new ArrayList(); finalFs.Add(Fs[0]); PointF end = ((PathCollection)paths[((ShapeRecord)Fs[0]).Path]).LastPoint; Fs.RemoveAt(0); while (Fs.Count > 0) { bool found = false; foreach (ShapeRecord sr in Fs) { PathCollection srp = (PathCollection)paths[sr.Path]; if (srp.FirstPoint == end) { end = srp.LastPoint; finalFs.Add(sr); Fs.Remove(sr); found = true; break; } } if (found == false) { finalFs.Add(Fs[0]); end = ((PathCollection)paths[ ((ShapeRecord)Fs[0]).Path]).LastPoint; Fs.RemoveAt(0); } } // and write them foreach (ShapeRecord sr in finalFs) { shapeRecords.Add(sr); } } } } for (int i = 0; i < srs.Length; i++) { if (srs[i].F0 == 0 && srs[i].F1 == 0) { // must be stroke ShapeRecord curSr = new ShapeRecord(); curSr.Fill = 0; curSr.Stroke = srs[i].S; curSr.Path = i; shapeRecords.Add(curSr); } } // convert to array ShapeRecord[] srecs = new ShapeRecord[shapeRecords.Count]; for (int i = 0; i < shapeRecords.Count; i++) { srecs[i] = (ShapeRecord)shapeRecords[i]; } writer.WriteUses(srecs); }
// Duplicate formatting variant of ConsolidatePaths above: de-duplicates fills and
// strokes, records per-segment fill0/fill1/stroke info (Shrec), groups contiguous
// segments into PathCollections, splits non-contiguous ones, and writes colour,
// stroke, path and use data to the DVex writer.
// NOTE(review): same issues as the other copy -- `shape.Fills`/`shape.Strokes`
// reference an identifier not declared here (loop variable is `sh`), and
// List<T>.Add() returns void but its result is assigned to an int (ArrayList-style);
// confirm the intended collection types.
private static void ConsolidatePaths(Symbol symbol, DVexWriter writer) { List<FillStyle> fills = new List<FillStyle>(); List<StrokeStyle> strokes = new List<StrokeStyle>(); fills.Add( new SolidFill(Color.Transparent) ); strokes.Add( new SolidStroke(0.0F, Color.Transparent) ); ArrayList allPaths = new ArrayList(); ArrayList allSrs = new ArrayList(); // Find all used colors/strokes, and the F0,F1,S info for each seg foreach(Shape sh in symbol.Shapes) { foreach(IShapeData s in sh.ShapeData) { int fill = 0; int stroke = 0; if (!fills.Contains(shape.Fills[s.FillIndex])) { fill = fills.Add(shape.Fills[s.FillIndex]); } else { fill = fills.IndexOf(shape.Fills[s.FillIndex]); } if( !strokes.Contains(shape.Strokes[s.StrokeIndex]) ) { stroke = strokes.Add(shape.Strokes[s.StrokeIndex]); } else { stroke = strokes.IndexOf(shape.Strokes[s.StrokeIndex]); } // break path into shape records foreach(IPathPrimitive ipp in s.Path) { if(ipp is IShapeData) { IShapeData ip = (IShapeData)ipp; if(allPaths.Contains(ip)) { // this must be a fill1 if it is a dup int index = allPaths.IndexOf(ip); Shrec sr = (Shrec)allSrs[index]; Shrec newShrec = new Shrec(0, 0); newShrec.F0 = (sr.F0 == 0) ? fill : sr.F0 ; newShrec.F1 = (sr.F1 == 0) ? fill : sr.F1 ; newShrec.S = (sr.S == 0) ? 
stroke : sr.S ; allSrs[index] = newShrec; } else { allSrs.Add(new Shrec(fill, stroke)); allPaths.Add(ip); } } } } // end groups } // end shapes // ok, now write out colors // sort fills by rgb, argb, and gradients ArrayList orderedFills = new ArrayList(); ArrayList rgbas = new ArrayList(); ArrayList gfs = new ArrayList(); foreach(Fill sf in fills) { if(sf is SolidFill) { if( ((SolidFill)sf).Color.A == 255 || (SolidFill)sf == fills[0]) // 'no fill' { orderedFills.Add(sf); } else { rgbas.Add(sf); } } else if(sf is GradientFill) { gfs.Add(sf); } else { // bitmap fills orderedFills.Add(new SolidFill(Color.Gray)); }; } SolidFill[] wrgbs = new SolidFill[orderedFills.Count]; wrgbs[0] = new SolidFill(Color.FromArgb(255,0,0,0)); int fRgb = 1; foreach(Fill f in orderedFills) { if(f != fills[0]) { wrgbs[fRgb++] = (SolidFill)f; } } int fRgba = 0; SolidFill[] wrgbas = new SolidFill[rgbas.Count]; foreach(Fill f in rgbas) { orderedFills.Add(f); wrgbas[fRgba++] = (SolidFill)f; } int fGr = 0; GradientFill[] wgfs = new GradientFill[gfs.Count]; foreach(Fill f in gfs) { orderedFills.Add(f); wgfs[fGr++] = (GradientFill)(f); } writer.WriteNbitColorDefs(wrgbs); writer.WriteNbitColorDefs(wrgbas); writer.WriteNbitGradientDefs(wgfs); //writer.WriteRgbColorDefs(wrgbs); //writer.WriteRgbaColorDefs(wrgbas); //writer.WriteGradientColorDefs(wgfs); // ok, colors written, now strokes // write out all the stroke defs second // get counts int wrgbCount = 0; int wrgbaCount = 0; foreach(Stroke st in strokes) { if(st.Color.A == 255 || st == strokes[0]) {wrgbCount++;} else{wrgbaCount++;} } // create stroke arrays Stroke[] wsrgbs = new Stroke[wrgbCount]; Stroke[] wsrgbas = new Stroke[wrgbaCount]; int sRgb = 0; int sRgba = 0; foreach(Stroke st in strokes) { if( st.Color.A == 255 || st == strokes[0]) { wsrgbs[sRgb++] = st; } else { wsrgbas[sRgba++] = st; } } // now write the stroke data writer.WriteNbitStrokeDefs(wsrgbs); writer.WriteNbitStrokeDefs(wsrgbas); //writer.WriteRgbStrokeDefs(wsrgbs); 
//writer.WriteRgbaStrokeDefs(wsrgbas); // and now paths // valid pathsegs must have the same F0, F1, and S ArrayList tempPaths = new ArrayList(); ArrayList tempSrsAl = new ArrayList(); PathCollection pc = new PathCollection(); Shrec curShrec = Shrec.Empty; for(int i = 0; i < allSrs.Count; i++) //Shrec sr in srsAl) { Shrec sr = (Shrec)allSrs[i]; if(sr.Equals(curShrec) || curShrec.Equals(Shrec.Empty)) { //add to path pc.Add((IShapeData)allPaths[i]); } else { // write to hash tempPaths.Add(pc); tempSrsAl.Add(curShrec); pc = new PathCollection(); pc.Add((IShapeData)allPaths[i]); } curShrec = sr; } if(!tempSrsAl.Contains(curShrec)) { tempPaths.Add(pc); tempSrsAl.Add(curShrec); } // split non contig paths ArrayList paths = new ArrayList(); ArrayList srsAl = new ArrayList(); foreach(PathCollection pcoll in tempPaths) { //pcoll.ReorderPath(); PathCollection[] pcolls = pcoll.SplitPath(); foreach(PathCollection splitP in pcolls) { paths.Add(splitP); srsAl.Add(tempSrsAl[tempPaths.IndexOf(pcoll)] ); //writer.WritePath(splitP.PointSegments); } } IShapeData[][] ips = new IShapeData[paths.Count][]; for(int i = 0; i < paths.Count; i++) { ips[i] = ((PathCollection)paths[i]).PointSegments; } writer.WritePaths(ips); // convert to array Shrec[] srs = new Shrec[srsAl.Count]; for(int i = 0; i < srsAl.Count; i++) { srs[i] = (Shrec)srsAl[i]; } // and finally, uses - must be sorted by fill color // use order Fill1 (no strokes), fill0[stroke], stroke only's // for each fill index{..}, then dangling strokes ArrayList shapeRecords = new ArrayList(); // start at 1 to avoid empty fills foreach(Fill f in orderedFills) { int curFill = fills.IndexOf(f); if(curFill != 0) { // all F1's of this color first ArrayList Fs = new ArrayList(); for(int i = 0; i < srs.Length; i++) { if(srs[i].F0 == curFill) { // add use for F0 ShapeRecord curSr = new ShapeRecord(); curSr.Fill = orderedFills.IndexOf(f); curSr.Stroke = srs[i].S; curSr.Path = i; Fs.Add(curSr); } if(srs[i].F1 == curFill ) { // add use for F1 
ShapeRecord curSr = new ShapeRecord(); curSr.Fill = orderedFills.IndexOf(f); curSr.Stroke = 0; curSr.Path = i; Fs.Add(curSr); } } //now sort the F1s from tip to tail if(Fs.Count > 0) { ArrayList finalFs = new ArrayList(); finalFs.Add(Fs[0]); PointF end = ((PathCollection)paths[((ShapeRecord)Fs[0]).Path]).LastPoint; Fs.RemoveAt(0); while(Fs.Count > 0) { bool found = false; foreach(ShapeRecord sr in Fs) { PathCollection srp = (PathCollection)paths[sr.Path]; if(srp.FirstPoint == end) { end = srp.LastPoint; finalFs.Add(sr); Fs.Remove(sr); found = true; break; } } if(found == false) { finalFs.Add(Fs[0]); end = ( (PathCollection)paths[ ((ShapeRecord)Fs[0]).Path] ).LastPoint; Fs.RemoveAt(0); } } // and write them foreach(ShapeRecord sr in finalFs) { shapeRecords.Add(sr); } } } } for(int i = 0; i < srs.Length; i++) { if(srs[i].F0 == 0 && srs[i].F1 == 0) { // must be stroke ShapeRecord curSr = new ShapeRecord(); curSr.Fill = 0; curSr.Stroke = srs[i].S; curSr.Path = i; shapeRecords.Add(curSr); } } // convert to array ShapeRecord[] srecs = new ShapeRecord[shapeRecords.Count]; for(int i = 0; i < shapeRecords.Count; i++) { srecs[i] = (ShapeRecord)shapeRecords[i]; } writer.WriteUses(srecs); }