/// <summary>
/// Adds a set of spells to this Effect's AffectMask, which is used to determine whether
/// a certain Spell and this effect have some kind of influence on one another (for procs, talent modifiers etc).
/// Usually the mask also contains any spell that is triggered by the original spell.
///
/// If you get a warning that the wrong set is affected, use AddAffectingSpells instead.
/// </summary>
public void AddToAffectMask(params SpellLineId[] abilities)
{
    uint[] mask = new uint[3];
    // OR together the three-part class masks of every given line's first rank;
    // a single line's mask can simply be copied as-is.
    if (abilities.Length != 1)
    {
        foreach (SpellLineId ability in abilities)
        {
            Spell firstRank = ability.GetLine().FirstRank;
            for (int index = 0; index < 3; ++index)
            {
                mask[index] |= firstRank.SpellClassMask[index];
            }
        }
    }
    else
    {
        abilities[0].GetLine().FirstRank.SpellClassMask.CopyTo(mask, 0);
    }
    // Warn when the combined mask matches a different number of spell lines than the
    // caller intended (i.e. the masks of unrelated lines overlap).
    HashSet<SpellLine> affectedSpellLines = SpellHandler.GetAffectedSpellLines(Spell.ClassId, mask);
    if (affectedSpellLines.Count != abilities.Length)
    {
        // NOTE(review): ToString(", ") is presumably a project extension that joins the
        // collection with the separator — confirm; the BCL has no such overload.
        LogManager.GetCurrentClassLogger().Warn(
            "[SPELL Inconsistency for {0}] Invalid affect mask affects a different set than the one intended: {1} (intended: {2}) - You might want to use AddAffectingSpells instead!",
            Spell, affectedSpellLines.ToString(", "), abilities.ToString(", "));
    }
    for (int index = 0; index < 3; ++index)
    {
        AffectMask[index] |= mask[index];
    }
}
/// <summary>
/// Validates that the wave <paramref name="format"/> and the requested start offset
/// are supported, throwing <see cref="ArgumentException"/> otherwise.
/// </summary>
/// <param name="format">Format of the input audio file.</param>
/// <param name="startsAt">Offset (seconds) where reading should begin.</param>
private static void CheckInputFileFormat(WaveFormat format, double startsAt)
{
    if (!AcceptedSampleRates.Contains(format.SampleRate))
    {
        // String.Join renders the actual accepted values; calling ToString() on the
        // collection itself would only print its type name.
        throw new ArgumentException(String.Format(
            "Sample rate of the given file is not supported {0}. Supported sample rates {1}. " +
            "Submit a github request if you need a different sample rate to be supported.",
            format, String.Join(", ", AcceptedSampleRates)));
    }
    if (!AcceptedBitsPerSample.Contains(format.BitsPerSample))
    {
        // The original passed interpolation-style {format} placeholders to String.Format,
        // which throws FormatException at runtime; positional placeholders are required.
        throw new ArgumentException(String.Format(
            "Bad file format {0}. Bits per sample ({1}) is less than accepted range.",
            format, format.BitsPerSample));
    }
    if (!AcceptedChannels.Contains(format.Channels))
    {
        throw new ArgumentException(String.Format(
            "Bad file format {0}. Number of channels is not in the accepted range.",
            format));
    }
    if (startsAt > format.LengthInSeconds)
    {
        throw new ArgumentException(String.Format(
            "Could not start reading from {0} second, since input file is shorter {1}",
            startsAt, format.LengthInSeconds));
    }
}
/// <summary>
/// Rewrites every React class component found in <paramref name="content"/> as a
/// function component, then prepends an import for any hooks the conversion introduced.
/// </summary>
/// <param name="content">Full text of a TypeScript/React source file.</param>
/// <returns>The transformed file text.</returns>
public virtual string SimplifyFile(string content)
{
    // Hook names (e.g. useState) collected while simplifying each class body.
    HashSet<string> hookImports = new HashSet<string>();
    var componentStarts = Regex.Matches(content, @"^(?<export>export )?(?<default>default )?class (?<className>\w+) extends React\.Component<(?<props>.*?)>\s*{\s*\r\n", RegexOptions.Multiline).Cast<Match>();
    // Process matches back-to-front so earlier match indices stay valid while content
    // is being rewritten in place.
    foreach (var m in componentStarts.Reverse())
    {
        // The class body ends at the first line containing only "}".
        var endMatch = new Regex(@"^}\s*$", RegexOptions.Multiline).Match(content, m.EndIndex());
        var simplifiedContent = SimplifyClass(content.Substring(m.EndIndex(), endMatch.Index - m.EndIndex()), hookImports);
        string newComponent = m.Groups["export"].Value + m.Groups["default"].Value + "function " + m.Groups["className"].Value + "(p : " + m.Groups["props"].Value + "){\r\n" + simplifiedContent + endMatch.Value;
        content = content.Substring(0, m.Index) + newComponent + content.Substring(endMatch.EndIndex());
    }
    if (hookImports.Any())
    {
        // Insert the hooks import directly after the last existing import line.
        var lastImport = Regex.Matches(content, "^import.*\r\n", RegexOptions.Multiline).Cast<Match>().Last();
        // NOTE(review): ToString(", ") is presumably a project extension that joins the
        // set with the separator — confirm; the BCL has no such overload.
        return(content.Substring(0, lastImport.EndIndex()) + $"import {{ {hookImports.ToString(", ")} }} from '@framework/Hooks'\r\n" + content.Substring(lastImport.EndIndex()));
    }
    else
    {
        return(content);
    }
}
/// <summary>
/// Renders the entry list as "{key1={v, v}, key2={v}, ...}".
/// </summary>
/// <returns>A human-readable dump of every entry and its set contents.</returns>
public override string ToString()
{
    StringBuilder buffer = new StringBuilder("{");
    List<KeyValuePair<int, FastSparseSetFactory<int>.FastSparseSet<int>>> lst = EntryList();
    if (lst != null)
    {
        bool first = true;
        foreach (KeyValuePair<int, FastSparseSetFactory<int>.FastSparseSet<int>> entry in lst)
        {
            if (!first)
            {
                buffer.Append(", ");
            }
            else
            {
                first = false;
            }
            HashSet<int> set = entry.Value.ToPlainSet();
            // string.Join prints the set's elements; HashSet<T>.ToString() would only
            // emit the type name ("System.Collections.Generic.HashSet`1[System.Int32]").
            buffer.Append(entry.Key).Append("={").Append(string.Join(", ", set)).Append("}");
        }
    }
    buffer.Append("}");
    return buffer.ToString();
}
/// <summary>
/// Throws a MIGRATE20012 error when any configuration-specific output item excludes a
/// file that the non-configuration output does not also exclude.
/// </summary>
/// <param name="configurationOutput">Items from the configuration-specific section.</param>
/// <param name="nonConfigurationOutput">Items from the shared (non-configuration) section.</param>
private void ThrowIfConfigurationHasAdditionalExcludes(IEnumerable<ProjectItemElement> configurationOutput, IEnumerable<ProjectItemElement> nonConfigurationOutput)
{
    // The union of non-configuration excludes does not depend on the outer loop item,
    // so build it once instead of once per configuration item (was O(n*m)).
    var nonConfigurationExcludes = new HashSet<string>();
    foreach (var item in nonConfigurationOutput)
    {
        if (item != null)
        {
            nonConfigurationExcludes.UnionWith(item.Excludes());
        }
    }

    foreach (var configurationItem in configurationOutput)
    {
        if (configurationItem == null)
        {
            continue;
        }

        var configurationHasAdditionalExclude =
            configurationItem.Excludes().Any(exclude => !nonConfigurationExcludes.Contains(exclude));

        if (configurationHasAdditionalExclude)
        {
            MigrationTrace.Instance.WriteLine(configurationItem.Exclude);
            // Trace the actual exclude values; HashSet.ToString() only prints the type name.
            MigrationTrace.Instance.WriteLine(string.Join(";", nonConfigurationExcludes));
            MigrationErrorCodes.MIGRATE20012("Unable to migrate projects with excluded files in configurations.")
                .Throw();
        }
    }
}
/// <summary>
/// Demonstrates basic HashSet operations: Add, iteration, subset checks and rendering.
/// </summary>
static void Main(string[] args)
{
    HashSet<int> ts = new HashSet<int>();
    ts.Add(5);
    ts.Add(10);
    ts.Add(20);
    ts.Add(15);
    foreach (int item in ts)
    {
        Console.WriteLine(item);
    }

    HashSet<int> hs2 = new HashSet<int>();
    hs2.Add(15);
    hs2.Add(20);
    hs2.Add(21);

    HashSet<int> hs3 = new HashSet<int>();
    Console.WriteLine(new string('-', 30));
    // The original discarded this result; the empty set is a subset of every set.
    Console.WriteLine(hs3.IsSubsetOf(hs2));
    Console.WriteLine(hs2.IsSubsetOf(ts));
    Console.WriteLine(new string('-', 30));
    // HashSet<T>.ToString() only prints the type name; join the elements instead.
    Console.WriteLine(string.Join(", ", hs2));
    Console.ReadKey();
}
/// <summary>
/// Runs FP-Growth on the tree, translates the mined item ids to product names using
/// "productos.txt" (lines of "id|name"), and returns one "{name, name, ...}" line per
/// frequent item set.
/// </summary>
/// <returns>The translated item sets, one per line.</returns>
/// <exception cref="Exception">When the tree has not been built yet.</exception>
public string FPGrow()
{
    if (tree == null)
    {
        throw new Exception("tree is null");
    }

    HashSet<ItemsSet<String>> result = tree.FPGrow(epsilon);

    // Collect every distinct item id appearing in the mined sets.
    HashSet<String> pending = new HashSet<String>();
    foreach (ItemsSet<String> itemSet in result)
    {
        foreach (String item in itemSet)
        {
            pending.Add(item);
        }
    }

    // Map item id -> product name; stop reading once every id is resolved.
    Dictionary<String, String> names = new Dictionary<string, string>();
    // using-statement fixes the reader leak in the original.
    using (StreamReader reader = new StreamReader("productos.txt"))
    {
        string line = reader.ReadLine();
        while (line != null && pending.Count != 0)
        {
            string[] parts = line.Split('|');
            if (pending.Contains(parts[0]))
            {
                // Drop the first character of the name (leading separator/space).
                names.Add(parts[0], parts[1].Substring(1, parts[1].Length - 1));
                pending.Remove(parts[0]);
            }
            line = reader.ReadLine();
        }
    }

    // StringBuilder instead of repeated string concatenation; the per-set separator
    // handling also no longer truncates previous output when a set is empty.
    StringBuilder sb = new StringBuilder();
    foreach (ItemsSet<String> itemSet in result)
    {
        sb.Append("{");
        bool firstItem = true;
        foreach (String item in itemSet)
        {
            string value = "";
            names.TryGetValue(item, out value);
            if (!firstItem)
            {
                sb.Append(", ");
            }
            sb.Append(value);
            firstItem = false;
        }
        sb.Append("}");
        sb.Append(Environment.NewLine);
    }
    return sb.ToString();
}
/// <summary>
/// A freshly constructed spreadsheet must report no non-empty cells.
/// </summary>
public void GetNamesTest2()
{
    AbstractSpreadsheet s = new Spreadsheet();
    HashSet<string> test = new HashSet<string>();
    HashSet<string> names = new HashSet<string>(s.GetNamesOfAllNonemptyCells());
    // The original compared ToString() of both sets, which compares type names and
    // always passes; SetEquals compares the actual contents.
    Assert.IsTrue(test.SetEquals(names), "Expected no non-empty cells in a new spreadsheet");
}
/// <summary>
/// Adds <paramref name="value"/> to the result-type filter and refreshes the matching
/// query-string parameter.
/// </summary>
/// <param name="value">Component type to add; silently ignored when it is not in
/// <c>AcceptableResultTypes</c>.</param>
public void AddResultType(AddressComponentTypes value)
{
    if (AcceptableResultTypes.Contains(value))
    {
        result_type.Add(value);
        // NOTE(review): ToString("|") is presumably a project extension joining the
        // collection with "|" — confirm; the BCL HashSet has no such overload.
        SetQuery(nameof(result_type), result_type?.ToString("|"));
    }
}
/// <summary>
/// Adds <paramref name="value"/> to the location-type filter and refreshes the matching
/// query-string parameter.
/// </summary>
/// <param name="value">Location type to add; <c>Unknown</c> is rejected as a sentinel.</param>
public void AddLocationType(GeometryLocationType value)
{
    if (value != GeometryLocationType.Unknown)
    {
        location_type.Add(value);
        // NOTE(review): ToString("|") is presumably a project extension joining the
        // collection with "|" — confirm; the BCL HashSet has no such overload.
        SetQuery(nameof(location_type), location_type?.ToString("|"));
    }
}
/// <summary>
/// Writes the set's elements on a single line separated by single spaces.
/// </summary>
/// <param name="set">Values to print; order follows the set's iteration order.</param>
private static void ShowValues(HashSet<int> set)
{
    // The original tried to suppress the trailing space by testing whether
    // set.ToString() ends with the current element — but HashSet<T>.ToString()
    // returns the type name, so the test never succeeded and every element got a
    // trailing space. string.Join produces the intended output directly.
    Console.WriteLine(string.Join(" ", set));
}
/// <summary>
/// Serializes a set of page style classes as a space-separated list.
/// </summary>
/// <param name="value">Expected to be a HashSet of PageStyleClass.</param>
/// <param name="settings">Write settings (unused here).</param>
/// <returns>The joined list, or null when <paramref name="value"/> has the wrong type.</returns>
public override string ConvertToString(object value, WriteSettings settings)
{
    HashSet<PageStyleClass> data = value as HashSet<PageStyleClass>;
    if (data != null)
    {
        // Join the elements themselves. The original passed data.ToString() — a single
        // string — so the result was the set's type name rather than its contents.
        return string.Join(" ", data);
    }
    return null;
}
/// <summary>
/// Renders the shared <c>Tasks</c> collection as a table string, holding the
/// <c>Tasks</c> lock while it is being read.
/// </summary>
/// <returns>The formatted table text.</returns>
public static string ToStringTableString()
{
    string stringTableString;
    lock (Tasks)
    {
        // NOTE(review): ToString(Columns) is presumably a project extension formatting
        // the collection with the given column definitions — confirm; not a BCL method.
        stringTableString = Tasks.ToString(Columns);
    }
    return(stringTableString);
}
/// <summary>
/// MapReduce round-trip with NullWritable keys: writes ten distinct values under null
/// keys, runs them through a null-mapper / identity-reducer job, and verifies that all
/// values come back (ordering is unspecified since all null keys compare equal).
/// </summary>
public virtual void TestNullKeys()
{
    JobConf conf = new JobConf(typeof(TestMapRed));
    FileSystem fs = FileSystem.GetLocal(conf);
    HashSet<string> values = new HashSet<string>();
    string m = "AAAAAAAAAAAAAA";
    // Builds ten distinct 14-char strings ("AAA…", "BBB…", …) by replacing every
    // occurrence of the previous letter with the next one.
    for (int i = 1; i < 11; ++i)
    {
        values.AddItem(m);
        m = m.Replace((char)('A' + i - 1), (char)('A' + i));
    }
    Path testdir = new Path(Runtime.GetProperty("test.build.data", "/tmp")).MakeQualified(fs);
    fs.Delete(testdir, true);
    Path inFile = new Path(testdir, "nullin/blah");
    SequenceFile.Writer w = SequenceFile.CreateWriter(fs, conf, inFile, typeof(NullWritable), typeof(Text), SequenceFile.CompressionType.None);
    Text t = new Text();
    foreach (string s in values)
    {
        t.Set(s);
        w.Append(NullWritable.Get(), t);
    }
    w.Close();
    FileInputFormat.SetInputPaths(conf, inFile);
    FileOutputFormat.SetOutputPath(conf, new Path(testdir, "nullout"));
    conf.SetMapperClass(typeof(TestMapRed.NullMapper));
    conf.SetReducerClass(typeof(IdentityReducer));
    conf.SetOutputKeyClass(typeof(NullWritable));
    conf.SetOutputValueClass(typeof(Text));
    conf.SetInputFormat(typeof(SequenceFileInputFormat));
    conf.SetOutputFormat(typeof(SequenceFileOutputFormat));
    conf.SetNumReduceTasks(1);
    conf.Set(MRConfig.FrameworkName, MRConfig.LocalFrameworkName);
    JobClient.RunJob(conf);
    // Since null keys all equal, allow any ordering
    SequenceFile.Reader r = new SequenceFile.Reader(fs, new Path(testdir, "nullout/part-00000"), conf);
    m = "AAAAAAAAAAAAAA";
    for (int i_1 = 1; r.Next(NullWritable.Get(), t); ++i_1)
    {
        // Each value read back must be one of the ten written; removing it as we go
        // also catches duplicate output.
        NUnit.Framework.Assert.IsTrue("Unexpected value: " + t, values.Remove(t.ToString()));
        m = m.Replace((char)('A' + i_1 - 1), (char)('A' + i_1));
    }
    // NOTE(review): values.ToString() in this failure message prints only the type name
    // unless a Sharpen extension overrides it — consider joining the remaining values.
    NUnit.Framework.Assert.IsTrue("Missing values: " + values.ToString(), values.IsEmpty());
}
/// <summary>
/// Setting a cell to the empty string must leave it out of the non-empty cell set.
/// </summary>
public void SetContentTest16()
{
    AbstractSpreadsheet s = new Spreadsheet();
    s.SetContentsOfCell("e1", "puppy");
    s.SetContentsOfCell("e5", string.Empty);
    HashSet<string> test = new HashSet<string>() { "e1" };
    HashSet<string> names = new HashSet<string>(s.GetNamesOfAllNonemptyCells());
    // The original compared ToString() of both sets, which compares type names and
    // always passes; SetEquals compares the actual contents.
    Assert.IsTrue(test.SetEquals(names), "Only e1 should be reported as non-empty");
}
/// <summary>
/// A* search from <paramref name="start"/> to <paramref name="goal"/> over the board
/// grid.
/// </summary>
/// <returns>The path from start to goal, or null when the goal is unreachable.</returns>
public List<Coordinate> GetShortestPath(Coordinate start, Coordinate goal)
{
    HashSet<Coordinate> visited = new HashSet<Coordinate>();
    Dictionary<Coordinate, Coordinate> parents = new Dictionary<Coordinate, Coordinate>();
    Dictionary<Coordinate, double> gScore = new Dictionary<Coordinate, double>();
    HeapPriorityQueue<Coordinate> fScoreQueue = new HeapPriorityQueue<Coordinate>(rows * cols);

    parents[start] = start;
    gScore.Add(start, 0);
    fScoreQueue.Enqueue(start, gScore[start] + Heuristic(start, goal));

    while (fScoreQueue.Count() != 0)
    {
        Coordinate current = fScoreQueue.Dequeue();

        Console.Out.WriteLine("");
        Console.Out.WriteLine("Current = " + current.ToString());
        // Join the collections for the debug dump; calling ToString() on the
        // collections themselves only printed their type names.
        Console.Out.WriteLine("Visited = " + string.Join(", ", visited));
        Console.Out.WriteLine("Parents = " + string.Join(", ", parents));
        Console.Out.WriteLine("gScore = " + string.Join(", ", gScore));
        Console.Out.WriteLine("fScoreQueue = " + fScoreQueue.ToString());

        if (current == goal)
        {
            return(ReconstructPath(parents, goal));
        }

        // BUG FIX: the original called visited.Add(start) here, so nodes other than
        // start were never closed and could be expanded repeatedly.
        visited.Add(current);

        foreach (Coordinate neighbor in board[current.row, current.col].GetNeighborCoordinates())
        {
            if (visited.Contains(neighbor))
            {
                continue;
            }
            double newGScore = gScore[current] + Distance(current, neighbor);
            if (!fScoreQueue.Contains(neighbor))
            {
                parents[neighbor] = current;
                gScore[neighbor] = newGScore;
                fScoreQueue.Enqueue(neighbor, newGScore + Heuristic(neighbor, goal));
            }
            else if (newGScore < gScore[neighbor])
            {
                parents[neighbor] = current;
                gScore[neighbor] = newGScore;
                fScoreQueue.UpdatePriority(neighbor, newGScore + Heuristic(neighbor, goal));
            }
        }
    }
    return(null);
}
/// <summary>
/// GetNamesOfAllNonemptyCells must return exactly the three cells that were set.
/// </summary>
public void GetNamesTest1()
{
    AbstractSpreadsheet s = new Spreadsheet();
    s.SetContentsOfCell("t6", "1");
    s.SetContentsOfCell("err5", "sheep");
    s.SetContentsOfCell("ces1994", "= 8/8");
    HashSet<string> test = new HashSet<string>() { "t6", "err5", "ces1994" };
    HashSet<string> names = new HashSet<string>(s.GetNamesOfAllNonemptyCells());
    // The original compared ToString() of both sets, which compares type names and
    // always passes; SetEquals compares the actual contents.
    Assert.IsTrue(test.SetEquals(names), "Non-empty cell names differ from the cells that were set");
}
/// <summary>
/// Marks the game as started, broadcasts a game-start packet to every peer, and — when
/// we are the session manager — pushes a cycle sync to all other in-game players.
/// </summary>
public void GameStart()
{
    this.timer = 0;
    if (!ingamePlayers.Contains(playerID))
    {
        ingamePlayers.Add(playerID);
    }
    if (!roomDict.ContainsKey(playerID))
    {
        roomDict.Add(playerID, new List<string>());
    }
    gameRunning = true;

    MonklandSteamManager.DataPacket packet = MonklandSteamManager.instance.GetNewPacket(WORLD_CHANNEL, WorldHandler);
    BinaryWriter writer = MonklandSteamManager.instance.GetWriterForPacket(packet);
    //Write message type
    writer.Write(Convert.ToByte(0));
    writer.Write(true);
    MonklandSteamManager.instance.FinalizeWriterToPacket(writer, packet);
    //MonklandSteamManager.instance.SendPacket(packet, (CSteamID)managerID), EP2PSend.k_EP2PSendReliable);
    MonklandSteamManager.instance.SendPacketToAll(packet, true, EP2PSend.k_EP2PSendReliable);

    if (isManager)
    {
        foreach (ulong pl in ingamePlayers)
        {
            if (pl != playerID)
            {
                SyncCycle((CSteamID)pl);
            }
        }
        // Constant assignment hoisted out of the loop — the original re-assigned it
        // once per player.
        syncDelay = 1000;
    }

    // Log the actual player ids; HashSet.ToString() only prints the type name.
    MonklandSteamManager.Log("GameStart Packet: " + string.Join(", ", ingamePlayers) + "\n" + ingamePlayers.Count + " players ingame.");
}
/// <summary>
/// A* search from <paramref name="start"/> to <paramref name="goal"/> over the board
/// grid.
/// </summary>
/// <returns>The path from start to goal, or null when the goal is unreachable.</returns>
public List<Coordinate> GetShortestPath(Coordinate start, Coordinate goal)
{
    HashSet<Coordinate> visited = new HashSet<Coordinate>();
    Dictionary<Coordinate, Coordinate> parents = new Dictionary<Coordinate, Coordinate>();
    Dictionary<Coordinate, double> gScore = new Dictionary<Coordinate, double>();
    HeapPriorityQueue<Coordinate> fScoreQueue = new HeapPriorityQueue<Coordinate>(rows * cols);

    parents[start] = start;
    gScore.Add(start, 0);
    fScoreQueue.Enqueue(start, gScore[start] + Heuristic(start, goal));

    while (fScoreQueue.Count() != 0)
    {
        Coordinate current = fScoreQueue.Dequeue();

        Console.Out.WriteLine("");
        Console.Out.WriteLine("Current = " + current.ToString());
        // Join the collections for the debug dump; calling ToString() on the
        // collections themselves only printed their type names.
        Console.Out.WriteLine("Visited = " + string.Join(", ", visited));
        Console.Out.WriteLine("Parents = " + string.Join(", ", parents));
        Console.Out.WriteLine("gScore = " + string.Join(", ", gScore));
        Console.Out.WriteLine("fScoreQueue = " + fScoreQueue.ToString());

        if (current == goal)
        {
            return ReconstructPath(parents, goal);
        }

        // BUG FIX: the original called visited.Add(start) here, so nodes other than
        // start were never closed and could be expanded repeatedly.
        visited.Add(current);

        foreach (Coordinate neighbor in board[current.row, current.col].GetNeighborCoordinates())
        {
            if (visited.Contains(neighbor))
                continue;

            double newGScore = gScore[current] + Distance(current, neighbor);
            if (!fScoreQueue.Contains(neighbor))
            {
                parents[neighbor] = current;
                gScore[neighbor] = newGScore;
                fScoreQueue.Enqueue(neighbor, newGScore + Heuristic(neighbor, goal));
            }
            else if (newGScore < gScore[neighbor])
            {
                parents[neighbor] = current;
                gScore[neighbor] = newGScore;
                fScoreQueue.UpdatePriority(neighbor, newGScore + Heuristic(neighbor, goal));
            }
        }
    }
    return null;
}
/// <summary>
/// Demonstrates that HashSet&lt;T&gt;.ToString() prints only the type name, and that the
/// wrapper type is needed for a useful rendering. (The "Yucky" results are the point.)
/// </summary>
private static void DemonstrateHashSet()
{
    string[] toppings = { "Mushroom", "Pepperoni", "Green Olives", "Black Olives", "Anchovies" };

    HashSet<String> myHashSet = new HashSet<String>();
    foreach (string topping in toppings)
    {
        myHashSet.Add(topping);
    }
    Console.WriteLine("HashSet = " + myHashSet.ToString()); // Yucky. Not what we want

    HashSetWrapper<String> myHashSetInWrapper = new HashSetWrapper<String>();
    foreach (string topping in toppings)
    {
        myHashSetInWrapper.Add(topping);
    }
    Console.WriteLine("HashSet = " + myHashSetInWrapper.ToString()); // Yucky. Not what we want
}
/// <summary>
/// Converts the body of a React class component to function-component form: pairs each
/// method header with its closing brace, simplifies each method, turns the render()
/// body into the function's tail, and prefixes the collected hook declarations.
/// </summary>
/// <param name="content">The class body text (between the class braces).</param>
/// <param name="hookImports">Receives hook names that must be imported by the caller.</param>
/// <returns>The rewritten body text.</returns>
public virtual string SimplifyClass(string content, HashSet<string> hookImports)
{
    // Hook declaration lines accumulated by SimplifyMethod for this class.
    HashSet<string> hooks = new HashSet<string>();
    // Method headers are 2-space-indented lines; method ends are 2-space-indented "}" lines.
    var matches = Regex.Matches(content, @"^  (?<text>\w.+)\s*\r\n", RegexOptions.Multiline).Cast<Match>().ToList();
    var endMatch = new Regex(@"^  };?\s*$", RegexOptions.Multiline).Matches(content).Cast<Match>().ToList();
    // Interleave starts and ends by position and keep only (start, end) pairs.
    var pairs = matches.Select(m => new { isStart = true, m })
        .Concat(endMatch.Select(m => new { isStart = false, m }))
        .OrderBy(p => p.m !.Index)
        .BiSelectC((start, end) => (start: start !, end: end !))
        .Where(a => a.start.isStart && !a.end.isStart)
        .Select(a => (start: a.start.m, end: a.end.m))
        .ToList();

    string?render = null;
    // Rewrite back-to-front so earlier indices stay valid while content is edited.
    foreach (var p in pairs.AsEnumerable().Reverse())
    {
        var methodContent = content.Substring(p.start.EndIndex(), p.end.Index - p.start.EndIndex());
        var simplifiedContent = SimplifyMethod(methodContent, hooks, hookImports);
        if (p.start.Value.Contains("render()"))
        {
            // render() becomes the function's trailing statements, de-indented one level.
            render = simplifiedContent.Lines().Select(l => l.StartsWith("  ") ? l.Substring(2) : l).ToString("\r\n");
            content = content.Substring(0, p.start.Index) + content.Substring(p.end.EndIndex());
        }
        else
        {
            string newComponent = ConvertToFunction(p.start.Value) + simplifiedContent + p.end.Value;
            content = content.Substring(0, p.start.Index) + newComponent + content.Substring(p.end.EndIndex());
        }
    }
    // NOTE(review): ToString(sep, sep2) / Lines / Indent are presumably project
    // extensions (Signum.Utilities) — confirm.
    return(hooks.ToString(s => s + ";\r\n", "").Indent(2) + content + render);
}
/// <summary>
/// Walks every file under <paramref name="root"/> (skipping the "File.txt" permission
/// manifests themselves) and applies the permissions declared by each ancestor
/// directory's File.txt to the file.
/// </summary>
/// <param name="root">Directory tree to scan.</param>
public static void LoadFolder(string root)
{
    string[] allpath = Directory.GetFiles(root, "*", SearchOption.AllDirectories);
    foreach (string path in allpath)
    {
        if (path.EndsWith("File.txt"))
        {
            continue;
        }
        string fn = Path.GetFileNameWithoutExtension(path);
        string[] trace = path.Split(Path.DirectorySeparatorChar);
        string parent = "";
        for (int i = 0; i < trace.Length - 1; i++)
        {
            parent += trace[i] + "\\";
            Debug.WriteLine("File [" + fn + "] From parent : " + parent);
            string[] lines = File.ReadAllLines(parent + "File.txt");
            foreach (string line in lines)
            {
                // Each manifest line is "<Y|N>#<numbers>": "Y" applies at any depth,
                // "N" only applies at the file's immediate parent directory.
                string[] perm = line.Split('#');
                if (perm[0] == "Y")
                {
                    AddNumbers(fn, perm[1]);
                }
                else if (perm[0] == "N")
                {
                    if (i == trace.Length - 2)
                    {
                        HashSet<int> added = AddNumbers(fn, perm[1]);
                        // Log the actual numbers; HashSet.ToString() only prints the type name.
                        Debug.WriteLine("Added " + string.Join(", ", added));
                    }
                }
            }
        }
    }
}
/// <summary>
/// Checks that GetTasks("Codecon") returns the four known Codecon tasks.
/// </summary>
public void GetTasks()
{
    List<Task> taskList = new List<Task>();
    //expected tasks within list
    Task t1 = new Task("Codecon", 1, "Mike", "Do The First Thing", "The thing that needs to be done first will be done first");
    Task t2 = new Task("Codecon", 2, "Jeff", "Do The Second Thing", "The thing that needs to be done second will be done second");
    Task t3 = new Task("Codecon", 3, "Slater", "Do The Third Thing", "The thing that needs to be done third will be done third");
    Task t4 = new Task("Codecon", 4, "Garrett", "Do The Right Thing", "There is reddit karma in it for you");
    //create the expected list
    taskList.Add(t1);
    taskList.Add(t2);
    taskList.Add(t3);
    taskList.Add(t4);

    List<Task> testReturn = EngineUnderTest.GetTasks("Codecon");

    //prepare for comparison
    HashSet<Task> input = new HashSet<Task>(taskList);
    HashSet<Task> output = new HashSet<Task>(testReturn);

    //need to implement comparison on our custom type
    //to string is the settling point
    // NOTE(review): HashSet<T>.ToString() returns the type name, so this assertion is
    // vacuous — it passes for any two sets regardless of contents. Implement
    // equality on Task and use SetEquals / CollectionAssert to make it meaningful.
    Assert.AreEqual(input.ToString(), output.ToString());
}
/// <summary>
/// Exercises the Ruby-style set extensions (to_s, to_a, intersection) against the
/// stock HashSet API; the plain ToString() assertion deliberately pins the BCL's
/// type-name rendering.
/// </summary>
public void TestNumberSet()
{
    HashSet<int> lhs = new HashSet<int> { 1, 2, 3 };
    HashSet<int> rhs = new HashSet<int> { 2, 5 };

    Assert.AreEqual("{1, 2, 3}", lhs.to_s());
    Assert.AreEqual("{2, 5}", rhs.to_s());
    // The framework's default ToString() only yields the type name — asserted on purpose.
    Assert.AreEqual("System.Collections.Generic.HashSet`1[System.Int32]", lhs.ToString());
    Assert.AreEqual("{}", new HashSet<int>().to_s());

    // In-place intersection mutates the receiver.
    HashSet<int> mutated = new HashSet<int> { 1, 2, 3 };
    mutated.IntersectWith(rhs);
    Assert.AreEqual("{2}", mutated.to_s());
    Assert.AreEqual(true, mutated.SetEquals(new HashSet<int> { 2 }));

    // The extension intersection() must leave its receiver untouched.
    HashSet<int> common = lhs.intersection(rhs);
    Assert.AreEqual("{2}", common.to_s());
    Assert.AreEqual("{1, 2, 3}", lhs.to_s());
    Assert.AreEqual(new int[] { 1, 2, 3 }, lhs.to_a());
    Assert.AreEqual(new int[] { 3, 5, 1 }, new HashSet<int> { 3, 5, 1 }.to_a());

    common.Remove(2);
    common.Add(7);
    Assert.AreEqual(new int[] { 7 }, common.to_a());
}
/// <summary>
/// LeftJoin must pair every left-table key with its right-table value, filling "NULL"
/// where the right table has no match.
/// </summary>
public void Left_Join_Test()
{
    //arrange
    MyHashTable<string> leftHT = new MyHashTable<string>(1024);
    leftHT.Add("Thunderous", "Loud");
    leftHT.Add("Icy", "Cold");
    leftHT.Add("Quick", "Fast");
    MyHashTable<string> rightHT = new MyHashTable<string>(1024);
    rightHT.Add("Thunderous", "quiet");
    rightHT.Add("Icy", "warm");

    //act
    HashSet<string[]> actual = HashTableChallenges.LeftJoin(leftHT, rightHT);
    HashSet<string[]> expected = new HashSet<string[]>();
    expected.Add(new string[] { "Icy", "Cold", "warm" });
    expected.Add(new string[] { "Quick", "Fast", "NULL" });
    expected.Add(new string[] { "Thunderous", "Loud", "quiet" });

    //assert
    // The original compared ToString() of both sets — type names, always equal — so
    // the test never failed. string[] has reference equality, so flatten each row to
    // a string and compare the rows order-insensitively.
    var expectedRows = new List<string>();
    foreach (string[] row in expected)
    {
        expectedRows.Add(string.Join("|", row));
    }
    var actualRows = new List<string>();
    foreach (string[] row in actual)
    {
        actualRows.Add(string.Join("|", row));
    }
    expectedRows.Sort();
    actualRows.Sort();
    Assert.Equal(expectedRows, actualRows);
}
/// <summary>
/// A set containing itself must render a self-reference marker instead of recursing.
/// </summary>
public void TestToString()
{
    HashSet<Object> s = new HashSet<Object>();
    s.Add(s);
    String result = s.ToString();
    // NOTE(review): the BCL HashSet<T>.ToString() returns the type name and never
    // contains "(this"; this assertion only holds if the HashSet under test is a
    // ported collection (e.g. Apache Harmony style) that renders contents and guards
    // self-references — confirm which type this test resolves to.
    Assert.IsTrue(result.IndexOf("(this") > -1, "should contain self ref");
}
/// <summary>
/// Links each tag name to the given entry in the Entry_Tag table (tag rows must
/// already exist in Tag). I/O and SQLite errors are logged and swallowed, matching
/// the class's existing best-effort behavior.
/// </summary>
/// <param name="newEntryID">Entry to link the tags to.</param>
/// <param name="tags">Tag names to link.</param>
public void addTagsToEntry(long newEntryID, HashSet<string> tags)
{
    // string.Join shows the actual tags; HashSet.ToString() only prints the type name.
    Console.WriteLine("{0}, {1}", newEntryID, string.Join(", ", tags));
    try
    {
        var connectionString = String.Format("Data Source={0};Version=3;", DB);
        using (var conn = new SQLiteConnection(connectionString))
        {
            conn.Open();
            using (var cmd = conn.CreateCommand())
            {
                cmd.CommandText = @"INSERT INTO Entry_Tag (EntryID, TagID) VALUES (@NewEntryID, (SELECT TagID FROM Tag WHERE Name = @Item))";
                cmd.CommandType = CommandType.Text;
                // Create the named parameters once and only update @Item per tag; the
                // original re-added identically named parameters on every iteration,
                // which fails (or mis-binds) from the second tag onward.
                SQLiteParameter pNewEntryID = new SQLiteParameter { ParameterName = "@NewEntryID", Value = newEntryID };
                cmd.Parameters.Add(pNewEntryID);
                SQLiteParameter pItem = new SQLiteParameter { ParameterName = "@Item" };
                cmd.Parameters.Add(pItem);
                foreach (var item in tags)
                {
                    pItem.Value = item;
                    tracer.PutSQLQuery(cmd, 17);
                    cmd.ExecuteNonQuery();
                }
            }
        }
    }
    catch (System.IO.IOException exception)
    {
        Console.WriteLine(String.Format("{0}: {1}", exception.Source, exception.Message));
    }
    catch (System.Data.SQLite.SQLiteException exception)
    {
        Console.WriteLine(String.Format("{0}: {1}", exception.Source, exception.Message));
    }
}
/// <summary>
/// Renders the wrapped set as "(a ^^ b ^^ ...)".
/// NOTE(review): ToString(" ^^ ") is presumably a project extension that joins the
/// set's elements with the separator — confirm; the BCL HashSet has no such overload.
/// </summary>
public override string ToString() => $"({hashSet.ToString(" ^^ ")})";
/// <summary>
/// Compiles the given source files into a class-library assembly on disk and returns
/// the loaded assembly.
/// </summary>
/// <param name="OutputAssemblyPath">Full path of the assembly to produce.</param>
/// <param name="SourceFileNames">Source files to compile.</param>
/// <param name="ReferencedAssembies">Assembly references, or null for just System/System.Core.</param>
/// <param name="PreprocessorDefines">Optional /define: symbols.</param>
/// <param name="TreatWarningsAsErrors">Whether compiler warnings fail the build.</param>
/// <returns>The compiled, loaded assembly.</returns>
/// <exception cref="BuildException">On directory-creation, launch or compile failure.</exception>
private static Assembly CompileAssembly(FileReference OutputAssemblyPath, HashSet<FileReference> SourceFileNames, List<string> ReferencedAssembies, List<string> PreprocessorDefines = null, bool TreatWarningsAsErrors = false)
{
    TempFileCollection TemporaryFiles = new TempFileCollection();

    // Setup compile parameters
    CompilerParameters CompileParams = new CompilerParameters();
    {
        // Always compile the assembly to a file on disk, so that we can load a cached version later if we have one
        CompileParams.GenerateInMemory = false;

        // This is the full path to the assembly file we're generating
        CompileParams.OutputAssembly = OutputAssemblyPath.FullName;

        // We always want to generate a class library, not an executable
        CompileParams.GenerateExecutable = false;

        // Never fail compiles for warnings
        CompileParams.TreatWarningsAsErrors = false;

        // Set the warning level so that we will actually receive warnings -
        // doesn't abort compilation as stated in documentation!
        CompileParams.WarningLevel = 4;

        // Always generate debug information as it takes minimal time
        CompileParams.IncludeDebugInformation = true;
#if !DEBUG
        // Optimise the managed code in Development
        CompileParams.CompilerOptions += " /optimize";
#endif
        Log.TraceVerbose("Compiling " + OutputAssemblyPath);

        // Keep track of temporary files emitted by the compiler so we can clean them up later
        CompileParams.TempFiles = TemporaryFiles;

        // Warnings as errors if desired (overrides the unconditional 'false' above)
        CompileParams.TreatWarningsAsErrors = TreatWarningsAsErrors;

        // Add assembly references
        {
            if (ReferencedAssembies == null)
            {
                // Always depend on the CLR System assembly
                CompileParams.ReferencedAssemblies.Add("System.dll");
                CompileParams.ReferencedAssemblies.Add("System.Core.dll");
            }
            else
            {
                // Add in the set of passed in referenced assemblies
                CompileParams.ReferencedAssemblies.AddRange(ReferencedAssembies.ToArray());
            }

            // The assembly will depend on this application
            Assembly UnrealBuildToolAssembly = Assembly.GetExecutingAssembly();
            CompileParams.ReferencedAssemblies.Add(UnrealBuildToolAssembly.Location);

            // The assembly will depend on the utilities assembly. Find that assembly
            // by looking for the one that contains a common utility class
            Assembly UtilitiesAssembly = Assembly.GetAssembly(typeof(FileReference));
            CompileParams.ReferencedAssemblies.Add(UtilitiesAssembly.Location);
        }

        // Add preprocessor definitions
        if (PreprocessorDefines != null && PreprocessorDefines.Count > 0)
        {
            CompileParams.CompilerOptions += " /define:";
            for (int DefinitionIndex = 0; DefinitionIndex < PreprocessorDefines.Count; ++DefinitionIndex)
            {
                if (DefinitionIndex > 0)
                {
                    CompileParams.CompilerOptions += ";";
                }
                CompileParams.CompilerOptions += PreprocessorDefines[DefinitionIndex];
            }
        }

        // @todo: Consider embedding resources in generated assembly file (version/copyright/signing)
    }

    // Create the output directory if it doesn't exist already
    DirectoryInfo DirInfo = new DirectoryInfo(OutputAssemblyPath.Directory.FullName);
    if (!DirInfo.Exists)
    {
        try
        {
            DirInfo.Create();
        }
        catch (Exception Ex)
        {
            throw new BuildException(Ex, "Unable to create directory '{0}' for intermediate assemblies (Exception: {1})", OutputAssemblyPath, Ex.Message);
        }
    }

    // Compile the code
    CompilerResults CompileResults;
    try
    {
        Dictionary<string, string> ProviderOptions = new Dictionary<string, string>() { { "CompilerVersion", "v4.0" } };
        CSharpCodeProvider Compiler = new CSharpCodeProvider(ProviderOptions);
        CompileResults = Compiler.CompileAssemblyFromFile(CompileParams, SourceFileNames.Select(x => x.FullName).ToArray());
    }
    catch (Exception Ex)
    {
        throw new BuildException(Ex, "Failed to launch compiler to compile assembly from source files:\n  {0}\n(Exception: {1})", String.Join("\n  ", SourceFileNames), Ex.ToString());
    }

    // Display compilation warnings and errors
    if (CompileResults.Errors.Count > 0)
    {
        Log.TraceInformation("While compiling {0}:", OutputAssemblyPath);
        foreach (CompilerError CurError in CompileResults.Errors)
        {
            Log.WriteLine(0, CurError.IsWarning ? LogEventType.Warning : LogEventType.Error, LogFormatOptions.NoSeverityPrefix, "{0}", CurError.ToString());
        }
        if (CompileResults.Errors.HasErrors || TreatWarningsAsErrors)
        {
            throw new BuildException("Unable to compile source files.");
        }
    }

    // Grab the generated assembly
    Assembly CompiledAssembly = CompileResults.CompiledAssembly;
    if (CompiledAssembly == null)
    {
        // List the files themselves; HashSet<T>.ToString() would only print the type name.
        throw new BuildException("UnrealBuildTool was unable to compile an assembly for '{0}'", String.Join(", ", SourceFileNames));
    }

    // Clean up temporary files that the compiler saved
    TemporaryFiles.Delete();

    return(CompiledAssembly);
}
/// <summary>
/// Indexes documents with plain, unindexed, unstored and term-vector fields across
/// several segments, then verifies that the merged FieldInfos classifies every field
/// correctly (all / indexed / not-indexed / term-vector).
/// </summary>
public virtual void TestGetFieldNames()
{
    Directory d = NewDirectory();
    // set up writer
    IndexWriter writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    Document doc = new Document();
    FieldType customType3 = new FieldType();
    customType3.Stored = true;
    doc.Add(new StringField("keyword", "test1", Field.Store.YES));
    doc.Add(new TextField("text", "test1", Field.Store.YES));
    doc.Add(new Field("unindexed", "test1", customType3));
    doc.Add(new TextField("unstored", "test1", Field.Store.NO));
    writer.AddDocument(doc);
    writer.Dispose();
    // set up reader
    DirectoryReader reader = DirectoryReader.Open(d);
    FieldInfos fieldInfos = MultiFields.GetMergedFieldInfos(reader);
    Assert.IsNotNull(fieldInfos.FieldInfo("keyword"));
    Assert.IsNotNull(fieldInfos.FieldInfo("text"));
    Assert.IsNotNull(fieldInfos.FieldInfo("unindexed"));
    Assert.IsNotNull(fieldInfos.FieldInfo("unstored"));
    reader.Dispose();
    // add more documents
    writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND).SetMergePolicy(NewLogMergePolicy()));
    // want to get some more segments here
    int mergeFactor = ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor;
    for (int i = 0; i < 5 * mergeFactor; i++)
    {
        doc = new Document();
        doc.Add(new StringField("keyword", "test1", Field.Store.YES));
        doc.Add(new TextField("text", "test1", Field.Store.YES));
        doc.Add(new Field("unindexed", "test1", customType3));
        doc.Add(new TextField("unstored", "test1", Field.Store.NO));
        writer.AddDocument(doc);
    }
    // new fields are in some different segments (we hope)
    for (int i = 0; i < 5 * mergeFactor; i++)
    {
        doc = new Document();
        doc.Add(new StringField("keyword2", "test1", Field.Store.YES));
        doc.Add(new TextField("text2", "test1", Field.Store.YES));
        doc.Add(new Field("unindexed2", "test1", customType3));
        doc.Add(new TextField("unstored2", "test1", Field.Store.NO));
        writer.AddDocument(doc);
    }
    // new termvector fields: vectors only, +offsets, +positions, +both
    FieldType customType5 = new FieldType(TextField.TYPE_STORED);
    customType5.StoreTermVectors = true;
    FieldType customType6 = new FieldType(TextField.TYPE_STORED);
    customType6.StoreTermVectors = true;
    customType6.StoreTermVectorOffsets = true;
    FieldType customType7 = new FieldType(TextField.TYPE_STORED);
    customType7.StoreTermVectors = true;
    customType7.StoreTermVectorPositions = true;
    FieldType customType8 = new FieldType(TextField.TYPE_STORED);
    customType8.StoreTermVectors = true;
    customType8.StoreTermVectorOffsets = true;
    customType8.StoreTermVectorPositions = true;
    for (int i = 0; i < 5 * mergeFactor; i++)
    {
        doc = new Document();
        doc.Add(new TextField("tvnot", "tvnot", Field.Store.YES));
        doc.Add(new Field("termvector", "termvector", customType5));
        doc.Add(new Field("tvoffset", "tvoffset", customType6));
        doc.Add(new Field("tvposition", "tvposition", customType7));
        doc.Add(new Field("tvpositionoffset", "tvpositionoffset", customType8));
        writer.AddDocument(doc);
    }
    writer.Dispose();
    // verify fields again
    reader = DirectoryReader.Open(d);
    fieldInfos = MultiFields.GetMergedFieldInfos(reader);
    ICollection<string> allFieldNames = new HashSet<string>();
    ICollection<string> indexedFieldNames = new HashSet<string>();
    ICollection<string> notIndexedFieldNames = new HashSet<string>();
    ICollection<string> tvFieldNames = new HashSet<string>();
    foreach (FieldInfo fieldInfo in fieldInfos)
    {
        string name = fieldInfo.Name;
        allFieldNames.Add(name);
        if (fieldInfo.Indexed)
        {
            indexedFieldNames.Add(name);
        }
        else
        {
            notIndexedFieldNames.Add(name);
        }
        if (fieldInfo.HasVectors())
        {
            tvFieldNames.Add(name);
        }
    }
    Assert.IsTrue(allFieldNames.Contains("keyword"));
    Assert.IsTrue(allFieldNames.Contains("text"));
    Assert.IsTrue(allFieldNames.Contains("unindexed"));
    Assert.IsTrue(allFieldNames.Contains("unstored"));
    Assert.IsTrue(allFieldNames.Contains("keyword2"));
    Assert.IsTrue(allFieldNames.Contains("text2"));
    Assert.IsTrue(allFieldNames.Contains("unindexed2"));
    Assert.IsTrue(allFieldNames.Contains("unstored2"));
    Assert.IsTrue(allFieldNames.Contains("tvnot"));
    Assert.IsTrue(allFieldNames.Contains("termvector"));
    Assert.IsTrue(allFieldNames.Contains("tvposition"));
    Assert.IsTrue(allFieldNames.Contains("tvoffset"));
    Assert.IsTrue(allFieldNames.Contains("tvpositionoffset"));
    // verify that only indexed fields were returned
    Assert.AreEqual(11, indexedFieldNames.Count); // 6 original + the 5 termvector fields
    Assert.IsTrue(indexedFieldNames.Contains("keyword"));
    Assert.IsTrue(indexedFieldNames.Contains("text"));
    Assert.IsTrue(indexedFieldNames.Contains("unstored"));
    Assert.IsTrue(indexedFieldNames.Contains("keyword2"));
    Assert.IsTrue(indexedFieldNames.Contains("text2"));
    Assert.IsTrue(indexedFieldNames.Contains("unstored2"));
    Assert.IsTrue(indexedFieldNames.Contains("tvnot"));
    Assert.IsTrue(indexedFieldNames.Contains("termvector"));
    Assert.IsTrue(indexedFieldNames.Contains("tvposition"));
    Assert.IsTrue(indexedFieldNames.Contains("tvoffset"));
    Assert.IsTrue(indexedFieldNames.Contains("tvpositionoffset"));
    // verify that only unindexed fields were returned
    Assert.AreEqual(2, notIndexedFieldNames.Count); // the following fields
    Assert.IsTrue(notIndexedFieldNames.Contains("unindexed"));
    Assert.IsTrue(notIndexedFieldNames.Contains("unindexed2"));
    // verify index term vector fields; join the names for the failure message —
    // the original passed tvFieldNames.ToString(), which only prints the type name.
    Assert.AreEqual(4, tvFieldNames.Count, string.Join(", ", tvFieldNames)); // 4 field has term vector only
    Assert.IsTrue(tvFieldNames.Contains("termvector"));
    reader.Dispose();
    d.Dispose();
}
/// <summary>
/// Lasso test: returns true when some state reachable in one step from init within
/// graph g (the "head") also lies on a cycle of graph h (the "tail"), i.e. an
/// accepting lasso exists. Head/Tail results are memoized per graph.
/// </summary>
/// <param name="g">Stem graph.</param>
/// <param name="h">Loop graph.</param>
/// <param name="init">Initial state id.</param>
private bool lasso_finding_test(HashSet<Arc> g, HashSet<Arc> h, int init){
    // NOTE(review): unless HashSet<Arc> here is a custom type that overrides
    // ToString(), BCL HashSet<Arc>.ToString() returns the SAME type-name string for
    // every set, so ALL graphs share one cache key and Head/Tail return stale results
    // for any second distinct graph. Replace the key with a canonical rendering of
    // the arcs (e.g. sorted arc list) — confirm Arc's formatting/ordering first.
    if(!Head.ContainsKey(g.ToString())){
        // Head(g) = states reachable from init via a single arc of g.
        HashSet<int> H=new HashSet<int>();
        foreach (Arc arc_g in g) {
            if(arc_g.From==init){
                H.Add(arc_g.To);
            }
        }
        Head.Add(g.ToString(), H);
    }
    if(!Tail.ContainsKey(h.ToString())){
        // Build an automaton from h's arcs to find its strongly connected components.
        FiniteAutomaton fa=new FiniteAutomaton();
        OneToOneTreeMap<int,FAState> st=new OneToOneTreeMap<int,FAState>();
        foreach (Arc arc_h in h) {
            if(!st.containsKey(arc_h.From)) st.put(arc_h.From, fa.createState());
            if(!st.containsKey(arc_h.To)) st.put(arc_h.To, fa.createState());
            fa.addTransition(st.getValue(arc_h.From), st.getValue(arc_h.To), arc_h.Label?"1":"0");
        }
        SCC s=new SCC(fa);
        // T starts as the states on non-trivial SCCs (cycles) ...
        HashSet<int> T=new HashSet<int>();
        foreach (FAState state in s.getResult()) {
            T.Add(st.getKey(state));
        }
        int TailSize=0;
        HashSet<Arc> isolatedArcs=h;
        // ... then grows backwards: any state with an arc into T joins T, iterating
        // until a fixed point is reached.
        while(TailSize!=T.Count){
            TailSize = T.Count;
            HashSet<Arc> isolatedArcsTemp=new HashSet<Arc>();
            foreach (Arc arc in isolatedArcs) {
                if(!T.Contains(arc.To)){
                    isolatedArcsTemp.Add(arc);
                }else{
                    T.Add(arc.From);
                }
            }
            isolatedArcs=isolatedArcsTemp;
        }
        Tail.Add(h.ToString(), T);
    }
    // A lasso exists iff Head(g) and Tail(h) intersect.
    HashSet<int> intersection = new HashSet<int>(Head[g.ToString()]);
    //intersection.retainAll(Tail[h.ToString()]);
    intersection.IntersectWith(Tail[h.ToString()]);
    //if(debug){
    //    if(intersection.isEmpty()){
    //        //debug("g_graph:"+g+", Head: "+Head.get(g.ToString()));
    //        //debug("h_graph:"+h+", Tail: "+Tail.get(h.ToString()));
    //    }
    //}
    return intersection.Count > 0;
}
/// <summary>
/// Project Euler problem 60: among primes below 20000, finds sets of five
/// primes such that every pair of them is "concatenation-compatible"
/// (as decided by IsPair), printing each qualifying quintuple to the console.
/// </summary>
/// <returns>
/// The string representation of the pair set (the default type-name string;
/// kept unchanged for interface compatibility — actual results go to stdout).
/// </returns>
static string problem60()
{
    List<int> primes = Prime(20000);

    // Precompute every compatible unordered pair (indices i < j).
    HashSet<Point> pairs = new HashSet<Point>();
    for (int i = 0; i < primes.Count; i++)
    {
        // BUG FIX: the loop previously started at "1 + 1" — a typo for "i + 1".
        // That skipped the (primes[0], primes[1]) pair entirely and generated
        // redundant pairs with j <= i that the search below never looks up.
        for (int j = i + 1; j < primes.Count; j++)
        {
            Point pair = new Point(primes[i], primes[j]);
            if (IsPair(pair.X, pair.Y))
            {
                pairs.Add(pair);
            }
        }
    }

    // Depth-first search over ascending indices; each new member must pair
    // with every member already chosen, pruning as early as possible.
    for (int a = 0; a < primes.Count; a++)
    {
        int aa = primes[a];
        for (int b = a + 1; b < primes.Count; b++)
        {
            int bb = primes[b];
            if (!pairs.Contains(new Point(aa, bb))) continue;
            for (int c = b + 1; c < primes.Count; c++)
            {
                int cc = primes[c];
                if (!pairs.Contains(new Point(aa, cc))) continue;
                if (!pairs.Contains(new Point(bb, cc))) continue;
                for (int d = c + 1; d < primes.Count; d++)
                {
                    int dd = primes[d];
                    if (!pairs.Contains(new Point(aa, dd))) continue;
                    if (!pairs.Contains(new Point(bb, dd))) continue;
                    if (!pairs.Contains(new Point(cc, dd))) continue;
                    for (int e = d + 1; e < primes.Count; e++)
                    {
                        int ee = primes[e];
                        if (!pairs.Contains(new Point(aa, ee))) continue;
                        if (!pairs.Contains(new Point(bb, ee))) continue;
                        if (!pairs.Contains(new Point(cc, ee))) continue;
                        if (!pairs.Contains(new Point(dd, ee))) continue;
                        // Found a fully connected quintuple; report it.
                        Console.Write(aa.ToString() + " ");
                        Console.Write(bb.ToString() + " ");
                        Console.Write(cc.ToString() + " ");
                        Console.Write(dd.ToString() + " ");
                        Console.Write(ee.ToString() + "\n");
                    }
                }
            }
        }
    }
    // NOTE: HashSet<T>.ToString() yields only the type name; retained so the
    // method's observable return value is unchanged for existing callers.
    return pairs.ToString();
}
/// <summary>
/// Returns true when every module in required_ is present in
/// <paramref name="enabled"/>, logging the comparison for diagnostics.
/// </summary>
/// <param name="enabled">The set of currently enabled modules.</param>
/// <returns>True iff required_ is a subset of <paramref name="enabled"/>.</returns>
public bool HasRequiredModules(HashSet <BitmaskOperation> enabled)
{
    // FIX: evaluate IsSubsetOf once instead of twice, and log the actual set
    // contents — HashSet<T>.ToString() only prints the type name.
    bool satisfied = required_.IsSubsetOf(enabled);
    Debug.LogFormat("HasRequiredModules {0} of {1} is {2}",
        string.Join(", ", required_), string.Join(", ", enabled), satisfied);
    return(satisfied);
}
/// <summary>
/// Demo driver: for a variety of nested collection shapes (lists, dictionaries,
/// mixtures of both, and a HashSet), prints the framework's default ToString()
/// output next to the ToStringExt() extension output. Pass "-v" to enable
/// verbose mode in ReflectionToStringExts.
/// </summary>
static void Main(string[] args)
{
    if (args.Length == 1 && args[0] == "-v")
    {
        ReflectionToStringExts.verbose = true;
    }

    // Simple flat list of strings.
    Console.WriteLine("flat list");
    var words = new List <string> { "one", "two", "three" };
    Console.WriteLine("ToString: " + words);
    Console.WriteLine("ToStringExt: " + words.ToStringExt());
    Console.WriteLine("");

    // One level of nesting: list of lists.
    Console.WriteLine("List of lists");
    var nested = new List <List <string> >
    {
        new List <string> { "one", "two" },
        new List <string> { "three", "four" }
    };
    Console.WriteLine("ToString: " + nested);
    Console.WriteLine("ToStringExt: " + nested.ToStringExt());
    Console.WriteLine("");

    // Two levels of nesting: list of lists of lists.
    Console.WriteLine("List of Lists of Lists");
    var deeplyNested = new List <List <List <string> > >
    {
        new List <List <string> >
        {
            new List <string> { "one", "two" },
            new List <string> { "three", "four" }
        },
        new List <List <string> >
        {
            new List <string> { "five", "six" },
            new List <string> { "seven", "eight" }
        }
    };
    Console.WriteLine("ToString: " + deeplyNested);
    Console.WriteLine("ToStringExt: " + deeplyNested.ToStringExt());
    Console.WriteLine("");

    // Flat dictionary.
    Console.WriteLine("Dictionary");
    var numberNames = new Dictionary <int, string> { { 1, "one" }, { 2, "two" } };
    Console.WriteLine("ToString: " + numberNames);
    Console.WriteLine("ToStringExt: " + numberNames.ToStringExt());
    Console.WriteLine("");

    // Dictionary whose values are themselves dictionaries.
    Console.WriteLine("Dictionary of Dictionaries");
    var nestedMap = new Dictionary <int, Dictionary <string, string> >
    {
        { 1, new Dictionary <string, string> { { "one", "first" }, { "two", "second" } } },
        { 2, new Dictionary <string, string> { { "three", "third" }, { "four", "forth" } } }
    };
    Console.WriteLine("ToString: " + nestedMap);
    Console.WriteLine("ToStringExt: " + nestedMap.ToStringExt());
    Console.WriteLine("");

    // Dictionary whose values are lists.
    Console.WriteLine("Dictionary of Lists");
    var mapOfLists = new Dictionary <int, List <string> >
    {
        { 1, new List <string> { "one", "two" } },
        { 2, new List <string> { "three", "four" } }
    };
    Console.WriteLine("ToString: " + mapOfLists);
    Console.WriteLine("ToStringExt: " + mapOfLists.ToStringExt());
    Console.WriteLine("");

    // List whose elements are dictionaries.
    Console.WriteLine("List of Dictionaries");
    var listOfMaps = new List <Dictionary <int, string> >
    {
        new Dictionary <int, string> { { 1, "one" }, { 2, "two" } },
        new Dictionary <int, string> { { 3, "three" }, { 4, "four" } }
    };
    Console.WriteLine("ToString: " + listOfMaps);
    Console.WriteLine("ToStringExt: " + listOfMaps.ToStringExt());
    Console.WriteLine("");

    // HashSet of strings.
    Console.WriteLine("HashSet");
    var uniqueWords = new HashSet <string> { "one", "two", "three" };
    Console.WriteLine("ToString: " + uniqueWords);
    Console.WriteLine("ToStringExt: " + uniqueWords.ToStringExt());
    Console.WriteLine("");
}
/// <summary>
/// Asserts that the received CMIS fault matches one of the expected service
/// exception types. The fault's type is read from the "type" node of the
/// fault detail; when no type node parses, the fault is treated as unmatched.
/// A null or empty expectation set means "accept anything" and returns.
/// </summary>
/// <param name="actualException">The fault received from the service.</param>
/// <param name="expectedExceptions">Accepted exception types; null/empty accepts all.</param>
public void assertException(FaultException<cmisFaultType> actualException, HashSet<enumServiceException> expectedExceptions)
{
    if (expectedExceptions == null || expectedExceptions.Count < 1)
    {
        return;
    }
    enumServiceException actualExceptionType = enumServiceException.runtime;
    bool found = false;
    if (actualException.Detail != null && actualException.Detail.Nodes != null)
    {
        foreach (XmlNode node in actualException.Detail.Nodes)
        {
            if (node != null && node.Name != null && node.Name.Equals("type") && node.InnerText != null)
            {
                // FIX: Enum.TryParse instead of Enum.Parse wrapped in an empty
                // catch — same best-effort semantics without swallowing
                // unrelated exceptions.
                enumServiceException parsed;
                if (Enum.TryParse(node.InnerText, true, out parsed))
                {
                    actualExceptionType = parsed;
                    found = true;
                }
            }
        }
    }
    if (!found || !expectedExceptions.Contains(actualExceptionType))
    {
        // FIX: HashSet<T>.ToString() printed only the type name and the
        // opening quote of the set dump was missing; join the actual members.
        Assert.Fail("Received exception '" + actualExceptionType +
            "' is not in set of expected exceptions: '" +
            string.Join(", ", expectedExceptions) + "'");
    }
}
/// <summary>
/// Tests whether an accepting "lasso" exists: a state that is both reachable
/// from <paramref name="init"/> by one arc of <paramref name="g"/> (the stem)
/// and able to reach a cycle inside <paramref name="h"/> (the loop).
/// Intermediate results are memoised in the Head / Tail dictionaries.
/// NOTE(review): the cache keys are g.ToString() / h.ToString(). Unless the
/// arc-set type produces a content-based string here, HashSet&lt;T&gt;.ToString()
/// returns only the type name, so every set would collide on a single cache
/// key — confirm a custom string representation exists.
/// </summary>
/// <param name="g">Arc set used for the one-step stem from <paramref name="init"/>.</param>
/// <param name="h">Arc set searched for states that can reach a cycle.</param>
/// <param name="init">Start state of the stem.</param>
/// <returns>True iff the stem targets and the cycle-reaching states intersect.</returns>
private bool lasso_finding_test(HashSet <Arc> g, HashSet <Arc> h, int init)
{
    // Head[g] = targets of all arcs of g that leave init (one-step reachability).
    if (!Head.ContainsKey(g.ToString()))
    {
        HashSet <int> H = new HashSet <int>();
        foreach (Arc arc_g in g)
        {
            if (arc_g.From == init)
            {
                H.Add(arc_g.To);
            }
        }
        Head.Add(g.ToString(), H);
    }
    // Tail[h] = states of h that can reach a cycle, computed once and cached.
    if (!Tail.ContainsKey(h.ToString()))
    {
        // Mirror h's arcs into a finite automaton; st maps state id <-> FA state.
        FiniteAutomaton fa = new FiniteAutomaton();
        OneToOneTreeMap <int, FAState> st = new OneToOneTreeMap <int, FAState>();
        foreach (Arc arc_h in h)
        {
            if (!st.containsKey(arc_h.From))
            {
                st.put(arc_h.From, fa.createState());
            }
            if (!st.containsKey(arc_h.To))
            {
                st.put(arc_h.To, fa.createState());
            }
            // Arc label is a bool; encode it as "1"/"0" for the FA alphabet.
            fa.addTransition(st.getValue(arc_h.From), st.getValue(arc_h.To), arc_h.Label ? "1" : "0");
        }
        // Seed T with the states returned by the SCC decomposition
        // (presumably the states lying on cycles — TODO confirm SCC.getResult()).
        SCC s = new SCC(fa);
        HashSet <int> T = new HashSet <int>();
        foreach (FAState state in s.getResult())
        {
            T.Add(st.getKey(state));
        }
        // Backward closure to a fixpoint: any state with an arc into T joins T.
        // Arcs whose target is still outside T are retried on the next pass.
        int TailSize = 0;
        HashSet <Arc> isolatedArcs = h;
        while (TailSize != T.Count)
        {
            TailSize = T.Count;
            HashSet <Arc> isolatedArcsTemp = new HashSet <Arc>();
            foreach (Arc arc in isolatedArcs)
            {
                if (!T.Contains(arc.To))
                {
                    isolatedArcsTemp.Add(arc); // not yet absorbed; keep for next pass
                }
                else
                {
                    T.Add(arc.From);
                }
            }
            isolatedArcs = isolatedArcsTemp;
        }
        Tail.Add(h.ToString(), T);
    }
    // A lasso exists iff some state is in both Head (stem-reachable) and Tail
    // (can reach a cycle).
    HashSet <int> intersection = new HashSet <int>(Head[g.ToString()]);
    //intersection.retainAll(Tail[h.ToString()]);
    intersection.IntersectWith(Tail[h.ToString()]);
    //if(debug){
    //  if(intersection.isEmpty()){
    //    //debug("g_graph:"+g+", Head: "+Head.get(g.ToString()));
    //    //debug("h_graph:"+h+", Tail: "+Tail.get(h.ToString()));
    //  }
    //}
    return(intersection.Count > 0);
}
/// <summary>
/// Builds a reciprocal-frame member layout from an ngon mesh: each interior
/// ngon edge is rotated about the averaged vertex normal, offset to both sides
/// by <paramref name="width"/>, trimmed against the offset planes of the
/// neighbouring edges around each ngon vertex, and emitted as two closed
/// rectangular polylines per edge (base and normal-offset copy), grouped per
/// edge index in the returned tree. Any exception is logged and an empty tree
/// (or whatever was built so far) is returned.
/// </summary>
/// <param name="mesh">Ngon mesh to process (RhinoCommon Mesh).</param>
/// <param name="angle">Rotation of each edge about the averaged vertex normal, in degrees.</param>
/// <param name="dist">Offset distance along the vertex normal for the second outline.</param>
/// <param name="width">Half-thickness of each member (offset to each side of the edge).</param>
/// <returns>DataTree of polylines, two per interior ngon edge, path = edge index.</returns>
public static DataTree <Polyline> ReciprocalFrame(this Mesh mesh, double angle = 10, double dist = 0.25, double width = 0.1)
{
    DataTree <Polyline> polylines = new DataTree <Polyline>();
    try {
        var display = new List <Line>(); // NOTE(review): never used below
        double thickness = width;
        int[][] tv = mesh.GetNGonsTopoBoundaries();
        HashSet <int> tvAll = mesh.GetAllNGonsTopoVertices();
        HashSet <int> e = mesh.GetAllNGonEdges(tv);
        // Two parallel edge-line arrays: lines = offset one way, lines1 = the other.
        Line[] lines = mesh.GetAllNGonEdgesLines(e);
        Line[] lines1 = mesh.GetAllNGonEdgesLines(e);
        bool[] nakedV = mesh.GetNakedEdgePointStatus(); // NOTE(review): never used below
        int[][] fe = mesh.GetNGonFacesEdges(tv);        // NOTE(review): never used below
        Plane[] planes = new Plane[lines.Length];
        Vector3d[] vecs = new Vector3d[mesh.TopologyEdges.Count];
        int j = 0; // dense index into lines/planes/vecs, parallel to iteration over e
        foreach (int i in e) {
            Line l = mesh.TopologyEdges.EdgeLine(i);
            Rhino.IndexPair ip = mesh.TopologyEdges.GetTopologyVertices(i);
            int v0 = mesh.TopologyVertices.MeshVertexIndices(ip.I)[0];
            int v1 = mesh.TopologyVertices.MeshVertexIndices(ip.J)[0];
            // Average of the two endpoint vertex normals = rotation/offset axis.
            Vector3d vec = new Vector3d(
                (mesh.Normals[v0].X + mesh.Normals[v1].X) * 0.5,
                (mesh.Normals[v0].Y + mesh.Normals[v1].Y) * 0.5,
                (mesh.Normals[v0].Z + mesh.Normals[v1].Z) * 0.5
                );
            vec.Unitize();
            vecs[j] = vec;
            // Only interior edges (2 adjacent faces) get the reciprocal rotation.
            if (mesh.TopologyEdges.GetConnectedFaces(i).Length == 2) {
                l.Transform(Transform.Rotation(Rhino.RhinoMath.ToRadians(angle), vec, l.PointAt(0.5)));
            }
            // Plane through the edge midpoint, normal = edge x vertex-normal;
            // used later as the trimming plane for neighbouring members.
            Vector3d cross = Vector3d.CrossProduct(l.Direction, vec);
            planes[j] = new Plane(l.PointAt(0.5), cross);
            cross.Unitize();
            // Shift +thickness for one side, then -2*thickness to land on the other.
            l.Transform(Transform.Translation(cross * thickness));
            lines[j] = l;
            l.Transform(Transform.Translation(-cross * 2 * thickness));
            lines1[j++] = l;
        }
        //ngon vertex edges
        int[][] connectedE = mesh.GetConnectedNGonEdgesToNGonTopologyVertices(tvAll, e);
        int[] allEArray = e.ToArray();      // edge id by dense index
        int[] allvArray = tvAll.ToArray();  // topo vertex id by dense index
        // Working copies that get trimmed in place, plus copies offset by vecs*dist.
        Line[] linesCopy = new Line[lines.Length];
        Line[] linesCopy1 = new Line[lines.Length];
        Line[] linesCopyM = new Line[lines.Length];
        Line[] linesCopyM1 = new Line[lines.Length];
        // Line[] linesCopyMoved = new Line[lines.Length];
        for (int i = 0; i < lines.Length; i++) {
            linesCopy[i] = new Line(lines[i].From, lines[i].To);
            linesCopy1[i] = new Line(lines1[i].From, lines1[i].To);
            linesCopyM[i] = new Line(lines[i].From + vecs[i] * dist, lines[i].To + vecs[i] * dist);
            linesCopyM1[i] = new Line(lines1[i].From + vecs[i] * dist, lines1[i].To + vecs[i] * dist);
        }
        // For every ngon topology vertex, trim each incident edge line against
        // the plane of the *previous* incident edge (cyclic pairing below).
        for (int i = 0; i < connectedE.Length; i++) {
            //Defines planes
            int total = connectedE[i].Length;
            Plane[] projectionPlanes = new Plane[total];
            int start = total - 1;
            // projectionPlanes[k] = plane of edge k-1 (cyclically): index j % total
            // walks total-1, 0, 1, ... so each edge is paired with its neighbour.
            for (j = start; j < start + total; j++) {
                int currentEdge = connectedE[i][j % total];
                int localID = Array.IndexOf(allEArray, currentEdge);
                projectionPlanes[j - start] = planes[localID];
            }
            //Intersect lines
            for (j = 0; j < connectedE[i].Length; j++) {
                int currentEdge = connectedE[i][j];
                // Naked (boundary) edges are not trimmed.
                if (mesh.TopologyEdges.GetConnectedFaces(currentEdge).Length == 1) {
                    continue;
                }
                int localID = Array.IndexOf(allEArray, currentEdge);
                Line currentLine = linesCopy[localID];
                Line currentLine1 = linesCopy1[localID];
                Line currentLineM = linesCopyM[localID];
                Line currentLineM1 = linesCopyM1[localID];
                IndexPair pair = mesh.TopologyEdges.GetTopologyVertices(currentEdge);
                double lineT, lineT1;
                Plane currentPlane = new Plane(projectionPlanes[j]);
                double flag = 1;
                //Check length
                // Probe both +thickness and -thickness offsets of the trimming
                // plane; the one yielding the shorter trimmed line decides which
                // side the real trim plane is shifted to (flag).
                Plane tempPlane = new Plane(currentPlane);
                Line temp2 = new Line(currentLine.From, currentLine.To);
                tempPlane.Origin += tempPlane.ZAxis * (thickness);
                Line temp = new Line(currentLine.From, currentLine.To);
                double tt;
                Rhino.Geometry.Intersect.Intersection.LinePlane(temp2, tempPlane, out tt);
                // Trim the end of the line that touches the current vertex.
                if (allvArray[i] == pair.I) {
                    temp.From = temp2.PointAt(tt);
                } else {
                    temp.To = temp2.PointAt(tt);
                }
                double lineLen0 = temp.Length;
                tempPlane = new Plane(currentPlane);
                temp2 = new Line(currentLine.From, currentLine.To);
                tempPlane.Origin += tempPlane.ZAxis * (-thickness);
                temp = new Line(currentLine.From, currentLine.To);
                Rhino.Geometry.Intersect.Intersection.LinePlane(temp2, tempPlane, out tt);
                if (allvArray[i] == pair.I) {
                    temp.From = temp2.PointAt(tt);
                } else {
                    temp.To = temp2.PointAt(tt);
                }
                double lineLen1 = temp.Length;
                //End Check Length
                if (lineLen1 < lineLen0) {
                    flag = -1;
                }
                currentPlane.Origin += currentPlane.ZAxis * (flag * thickness);
                // NOTE(review): the first two LinePlane results are immediately
                // overwritten by the next two calls (dead stores), so the base
                // lines are trimmed with the parameters of the offset (M) lines.
                // This is only exact if the line parameterisations coincide —
                // confirm whether the first two calls were meant to feed
                // currentLine/currentLine1 separately.
                Rhino.Geometry.Intersect.Intersection.LinePlane(currentLine, currentPlane, out lineT);
                Rhino.Geometry.Intersect.Intersection.LinePlane(currentLine1, currentPlane, out lineT1);
                Rhino.Geometry.Intersect.Intersection.LinePlane(currentLineM, currentPlane, out lineT);
                Rhino.Geometry.Intersect.Intersection.LinePlane(currentLineM1, currentPlane, out lineT1);
                // Replace the vertex-side endpoint with the plane intersection.
                if (allvArray[i] == pair.I) {
                    currentLine.From = currentLine.PointAt(lineT);
                    currentLine1.From = currentLine1.PointAt(lineT1);
                    currentLineM.From = currentLineM.PointAt(lineT);
                    currentLineM1.From = currentLineM1.PointAt(lineT1);
                } else {
                    currentLine.To = currentLine.PointAt(lineT);
                    currentLine1.To = currentLine1.PointAt(lineT1);
                    currentLineM.To = currentLineM.PointAt(lineT);
                    currentLineM1.To = currentLineM1.PointAt(lineT1);
                }
                // Write the trimmed lines back (Line is a struct; copies above
                // do not alias the arrays).
                linesCopy[localID] = currentLine;
                linesCopy1[localID] = currentLine1;
                linesCopyM[localID] = currentLineM;
                linesCopyM1[localID] = currentLineM1;
            }
        }
        // Emit two closed rectangles per interior edge: base pair and offset pair.
        for (int i = 0; i < linesCopy.Length; i++) {
            if (mesh.TopologyEdges.GetConnectedFaces(allEArray[i]).Length == 1) {
                continue;
            }
            polylines.AddRange(new Polyline[] {
                new Polyline(new Point3d[] { linesCopy[i].From, linesCopy[i].To, linesCopy1[i].To, linesCopy1[i].From, linesCopy[i].From }),
                new Polyline(new Point3d[] { linesCopyM[i].From, linesCopyM[i].To, linesCopyM1[i].To, linesCopyM1[i].From, linesCopyM[i].From })
            }, new GH_Path(i));
        }
        //A = linesCopy;
        //B = linesCopy1;
        //C = linesCopyM;
        //D = linesCopyM1;
        //E = polylines;
    } catch (Exception e) {
        // Best-effort: report and fall through to return whatever was built.
        Rhino.RhinoApp.WriteLine(e.ToString());
    }
    return(polylines);
}