/// <summary>
/// Recursively collects the names of all members referenced inside an expression tree.
/// </summary>
/// <param name="list">Receives the member names found (set semantics collapse duplicates).</param>
/// <param name="expr">Expression to walk; null is ignored.</param>
public static void Parse(SortedSet<string> list, Expression expr)
{
    // Pattern-matching switch replaces the original chain of 'as'-casts + null checks.
    switch (expr)
    {
        case null:
            return;
        case MemberExpression member:
            // Leaf: record the accessed member's name.
            list.Add(member.Member.Name);
            break;
        case BinaryExpression binary:
            Parse(list, binary.Left);
            Parse(list, binary.Right);
            break;
        case UnaryExpression unary:
            Parse(list, unary.Operand);
            break;
        case MethodCallExpression call:
            // Walk each argument; the call target itself is not recorded.
            foreach (Expression argument in call.Arguments)
                Parse(list, argument);
            break;
    }
}
// Reads "first | last | course" records, one per line, and groups students by course.
// Parsing stops at the first empty line or end of file (original behavior preserved).
private static void ReadStudentsInfoFromFile(string filePath, SortedDictionary<string, SortedSet<Student>> courses)
{
    using (StreamReader reader = new StreamReader(filePath))
    {
        string line = reader.ReadLine();
        while (!string.IsNullOrEmpty(line))
        {
            string[] data = line.Split('|');
            string firstName = data[0].Trim();
            string lastName = data[1].Trim();
            string courseName = data[2].Trim();

            // Single lookup (TryGetValue) instead of ContainsKey + indexer.
            SortedSet<Student> students;
            if (!courses.TryGetValue(courseName, out students))
            {
                students = new SortedSet<Student>();
                courses[courseName] = students;
            }
            students.Add(new Student(firstName, lastName));

            line = reader.ReadLine();
        }
    }
}
// Flattens an XML stream into a FlatXml: one FlatXmlNode per element, attribute and
// text node, each stamped with its reader depth and a monotonically increasing
// document position.
public FlatXml Parse(XmlReader reader)
{
    var nodes = new SortedSet<FlatXmlNode>();
    var position = 0;
    while (reader.Read())
    {
        switch (reader.NodeType)
        {
            case XmlNodeType.Element:
                var element = new FlatXmlNode {Name = reader.Name, Depth = reader.Depth, Position = position};
                nodes.Add(element);
                if (reader.HasAttributes)
                {
                    while (reader.MoveToNextAttribute())
                    {
                        // Each attribute consumes its own position slot, after its element.
                        position += 1;
                        var attribute = new FlatXmlNode {Name = reader.Name, Value = reader.Value, Position = position, Depth = reader.Depth};
                        nodes.Add(attribute);
                    }
                }
                break;
            case XmlNodeType.Text:
                var t = new FlatXmlNode {Value = reader.Value, Depth = reader.Depth, Position = position};
                nodes.Add(t);
                break;
        }
        // Advance position for every node read, including kinds not handled above.
        position += 1;
    }
    return new FlatXml(nodes);
}
// Prim's algorithm main loop: repeatedly extracts the cheapest edge from the frontier
// set and, if it reaches a not-yet-used node, adds it to the spanning tree (mpdNodes)
// and pushes that node's outgoing edges onto the frontier.
// NOTE(review): SortedSet used as a priority queue silently drops edges that compare
// equal — confirm Edge's comparer breaks weight ties, otherwise candidates can be lost.
private static void FindMinimumSpanningTree(bool[] used, SortedSet<Edge> priority, List<Edge> mpdNodes, List<Edge> edges)
{
    while (priority.Count > 0)
    {
        // Cheapest frontier edge.
        var edge = priority.Min;
        priority.Remove(edge);
        if (!used[edge.EndNode])
        {
            used[edge.EndNode] = true;
            mpdNodes.Add(edge);
            // Enqueue edges leaving the newly reached node toward unused nodes.
            for (int i = 0; i < edges.Count; i++)
            {
                if (!mpdNodes.Contains(edges[i]))
                {
                    if (edge.EndNode == edges[i].StartNode && !used[edges[i].EndNode])
                    {
                        priority.Add(edges[i]);
                    }
                }
            }
        }
    }
}
// Generates the requested quantity of distinct, formatted birth-certificate numbers
// on a background thread and appends them (in sorted order) to the output.
private void buttonGerar_Click(object sender, EventArgs e)
{
    // Read the control on the UI thread: WinForms controls must not be touched
    // from the Task.Run worker below (cross-thread access).
    string quantidadeText = textBoxQtd.Text;
    Task.Run(() =>
    {
        ClearText();
        Random random = new Random((int) DateTime.Now.Ticks);
        SortedSet<string> numerosCertidao = new SortedSet<string>();
        int quantidade;
        if (!Int32.TryParse(quantidadeText, out quantidade))
        {
            InsertText(String.Format("Erro: \"{0}\" não é uma quantidade válida.", quantidadeText));
            return;
        }
        // The set ignores duplicates, so loop until enough unique numbers exist.
        while (numerosCertidao.Count < quantidade)   // Count property, not LINQ Count()
        {
            string numeroCertidao = CertidaoNascimentoHelper.GerarNumeroCertidao(random);
            // Insert the standard visual grouping (6-2-2-4-1-5-3-7-2 digits).
            numeroCertidao = numeroCertidao.Substring(0, 6) + " " + numeroCertidao.Substring(6, 2) + " " + numeroCertidao.Substring(8, 2) + " " + numeroCertidao.Substring(10, 4) + " " + numeroCertidao.Substring(14, 1) + " " + numeroCertidao.Substring(15, 5) + " " + numeroCertidao.Substring(20, 3) + " " + numeroCertidao.Substring(23, 7) + " " + numeroCertidao.Substring(30, 2);
            numerosCertidao.Add(numeroCertidao);
        }
        foreach (var numero in numerosCertidao)
        {
            InsertText(numero);
        }
    });
}
// Toggles each incoming number in a sorted set (add if absent, remove if present)
// and after every toggle reports the minimum difference between any two stored numbers.
static void Main(string[] args)
{
    int n = int.Parse(Console.ReadLine());
    SortedSet<long> numberCollection = new SortedSet<long>();
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < n; i++)
    {
        long currentNumber = long.Parse(Console.ReadLine());
        // Add returns false when the value is already present — single-lookup toggle
        // instead of the original Contains + Add/Remove double lookup.
        if (!numberCollection.Add(currentNumber))
        {
            numberCollection.Remove(currentNumber);
        }
        if (numberCollection.Count == 0)
        {
            sb.AppendLine("There are no numbers");
        }
        else if (numberCollection.Count == 1)
        {
            sb.AppendLine("There is only one number");
        }
        else
        {
            long minDifference = CalculateMinDifference(numberCollection);
            sb.AppendLine(minDifference.ToString());
        }
    }
    Console.Write(sb.ToString());
}
public static void Main()
{
    // Sample graph: one Edge per street segment (start, end, weight).
    var graphEdges = new List<Edge>
    {
        new Edge(1, 3, 5),
        new Edge(1, 2, 4),
        new Edge(1, 4, 9),
        new Edge(2, 4, 2),
        new Edge(3, 4, 20),
        new Edge(3, 5, 7),
        new Edge(4, 5, 8),
        new Edge(5, 6, 12),
    };

    const int numberOfNodes = 6;
    var visited = new bool[numberOfNodes + 1];
    var treeEdges = new List<Edge>();
    var frontier = new SortedSet<Edge>();

    // Seed the frontier with every edge leaving the start node (first edge's start).
    var startNode = graphEdges[0].StartNode;
    foreach (var candidate in graphEdges)
    {
        if (candidate.StartNode == startNode)
        {
            frontier.Add(candidate);
        }
    }
    visited[startNode] = true;

    FindMinimumSpanningTree(visited, frontier, treeEdges, graphEdges);
    PrintMinimumSpanningTree(treeEdges);
}
// Populates this tree node's children: first a view model for the location's session
// object, then one DatabaseViewModel per database opened at this location, iterated
// in the SortedSet's sorted order.
protected override void LoadChildren()
{
    base.Children.Add(new ObjectViewModel(m_databaseLocation, this, m_databaseLocation.Session));
    SortedSet<Database> dbSet = new SortedSet<Database>(m_databaseLocation.Session.OpenLocationDatabases(m_databaseLocation, false));
    foreach (Database database in dbSet)
        base.Children.Add(new DatabaseViewModel(this, database));
}
// Sends an unsubscribe request for one or more instruments (';' or ',' separated)
// on the given exchange, then drops them from the local subscription book-keeping.
public virtual void Unsubscribe(string szInstrument, string szExchange)
{
    lock (locker)
    {
        IntPtr szInstrumentPtr = Marshal.StringToHGlobalAnsi(szInstrument);
        IntPtr szExchangePtr = Marshal.StringToHGlobalAnsi(szExchange);
        try
        {
            proxy.XRequest((byte)RequestType.Unsubscribe, Handle, IntPtr.Zero, 0, 0, szInstrumentPtr, 0, szExchangePtr, 0, IntPtr.Zero, 0);
            SortedSet<string> instruments;
            if (!_SubscribedInstruments.TryGetValue(szExchange, out instruments))
            {
                // Keep an (empty) entry for the exchange so later lookups succeed,
                // matching the original behavior.
                instruments = new SortedSet<string>();
                _SubscribedInstruments[szExchange] = instruments;
            }
            foreach (string instrument in szInstrument.Split(new char[2] { ';', ',' }))
            {
                instruments.Remove(instrument);
            }
        }
        finally
        {
            // Free the unmanaged copies even if the native call throws (leak fix).
            Marshal.FreeHGlobal(szInstrumentPtr);
            Marshal.FreeHGlobal(szExchangePtr);
        }
    }
}
public void ListTests_Exception()
{
    // Listing entries starting past the single stored record is expected to fail.
    var numbers = new SortedSet<string> { "+35929811111" };
    var repository = new PhonebookRepository();
    repository.AddPhone("Kalina", numbers);
    repository.ListEntries(10, 10);
}
// Creates an empty unit-of-work with all of its lookup structures:
//  - unitsByType:   units grouped by a string key (presumably type name — confirm)
//  - unitNames:     the set of unit names currently registered
//  - unitsByAttack: units grouped by attack value, keys kept sorted
//  - allAttacks:    the distinct attack values present, kept sorted
public UnitOfWork()
{
    this.unitsByType = new Dictionary<string, SortedSet<Unit>>();
    this.unitNames = new HashSet<string>();
    this.unitsByAttack = new SortedDictionary<int, SortedSet<Unit>>();
    this.allAttacks = new SortedSet<int>();
}
// Test fixture setup: indexes a few hundred random unicode terms into a fresh
// directory, keeps the same terms in a sorted set, and builds an automaton accepting
// exactly that term union for later comparisons against the index.
public override void SetUp()
{
    base.SetUp();
    // we generate aweful regexps: good for testing.
    // but for preflex codec, the test can be very slow, so use less iterations.
    NumIterations = Codec.Default.Name.Equals("Lucene3x") ? 10 * RANDOM_MULTIPLIER : AtLeast(50);
    Dir = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), Dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false)).SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000)));
    Document doc = new Document();
    Field field = NewStringField("field", "", Field.Store.YES);
    doc.Add(field);
    Terms = new SortedSet<BytesRef>();
    int num = AtLeast(200);
    for (int i = 0; i < num; i++)
    {
        string s = TestUtil.RandomUnicodeString(Random());
        // Reuse the same Document/Field instance; only the value changes per doc.
        field.StringValue = s;
        Terms.Add(new BytesRef(s));
        writer.AddDocument(doc);
    }
    TermsAutomaton = BasicAutomata.MakeStringUnion(Terms);
    Reader = writer.Reader;
    Searcher = NewSearcher(Reader);
    writer.Dispose();
}
public void TestCollectionContains()
{
    // Populate the set, then verify membership for the inserted values and
    // non-membership for two ranges of absent values.
    var sortedSet = new SortedSet<int>();
    int[] present = { 10, 5, 6, 20, 13, 14 };
    foreach (int value in present)
    {
        sortedSet.Add(value);
    }

    foreach (int value in present)
    {
        Assert.IsTrue(sortedSet.Contains(value));
    }
    for (int value = 0; value < 4; value++)
    {
        Assert.IsFalse(sortedSet.Contains(value));
    }
    for (int value = 21; value < 50; value++)
    {
        Assert.IsFalse(sortedSet.Contains(value));
    }
}
// Registers a new bunny in every index after validating that the room exists
// and the name is not already taken.
public void AddBunny(string name, int team, int roomId)
{
    // Validate targets before mutating any index.
    if (!Rooms.ContainsKey(roomId))
    {
        throw new ArgumentException("Room " + roomId + " does not exist.");
    }
    if (BunniesByName.ContainsKey(name))
    {
        throw new ArgumentException("Bunny " + name + " already exist.");
    }

    // Lazily create the per-team and per-room/per-team buckets.
    if (!BunniesByTeam.ContainsKey(team))
    {
        BunniesByTeam[team] = new SortedSet<Bunny>();
    }
    if (!BunniesByRoomTeam.ContainsKey(roomId))
    {
        BunniesByRoomTeam[roomId] = new Dictionary<int, SortedSet<Bunny>>();
    }
    if (!BunniesByRoomTeam[roomId].ContainsKey(team))
    {
        BunniesByRoomTeam[roomId][team] = new SortedSet<Bunny>();
    }

    // Insert into all four indexes.
    var newcomer = new Bunny(name, team, roomId);
    Rooms[roomId].Add(newcomer);
    BunniesByName[name] = newcomer;
    BunniesByTeam[team].Add(newcomer);
    BunniesByRoomTeam[roomId][team].Add(newcomer);
}
// Verifies the preprocessor's special-character handling: '_', '.', '-', '+' are
// stripped inside a tag, while ',', ';', space and tab split one tag into several.
// NOTE(review): relies on the test framework comparing the two sets element-wise;
// confirm Assert.AreEqual performs collection equality here, not reference equality.
public void SpecialCharsTagPreprocessorTest()
{
    var tagPreprocessor = new SpecialCharsTagPreprocessor();
    var inputTags = new SortedSet<string>(new[] { "test1_XYZ1", "test2.XYZ2", "test3-XYZ3", "test4+XYZ4", "test5,XYZ5", "test6;XYZ6", "test7 XYZ7", "test8\tXYZ8" });
    var outputTags = new SortedSet<string>(new[] { "test1XYZ1", "test2XYZ2", "test3XYZ3", "test4XYZ4", "test5", "XYZ5", "test6", "XYZ6", "test7", "XYZ7", "test8", "XYZ8" });
    var resultTags = tagPreprocessor.Preprocess(inputTags);
    Assert.AreEqual(outputTags, resultTags);
}
// Reads "city;venue;performer" lines until "END", grouping performers per venue per
// city, then prints each city followed by its venues and their sorted performers.
static void Main()
{
    Dictionary<string, SortedDictionary<string, SortedSet<string>>> NightLife = new Dictionary<string, SortedDictionary<string, SortedSet<string>>>();
    string[] input = Console.ReadLine().Split(';');
    while (input[0] != "END")
    {
        string city = input[0];
        string venue = input[1];
        string performer = input[2];

        // Get-or-create with a single lookup instead of ContainsKey + indexer.
        SortedDictionary<string, SortedSet<string>> venues;
        if (!NightLife.TryGetValue(city, out venues))
        {
            venues = new SortedDictionary<string, SortedSet<string>>();
            NightLife[city] = venues;
        }
        SortedSet<string> performers;
        if (!venues.TryGetValue(venue, out performers))
        {
            performers = new SortedSet<string>();
            venues[venue] = performers;
        }
        performers.Add(performer);

        input = Console.ReadLine().Split(';');
    }
    foreach (var cityPair in NightLife)
    {
        Console.WriteLine(cityPair.Key);
        foreach (var venuePair in cityPair.Value)
        {
            Console.WriteLine("->{0}: {1}", venuePair.Key, String.Join(",", venuePair.Value));
        }
    }
}
// Prim's algorithm over this.edges: seeds the frontier with all edges leaving the
// start node, then repeatedly takes the cheapest frontier edge; when it reaches an
// unvisited node the edge joins the spanning tree and further candidates are
// enqueued via AddEdges. Finally prints the resulting tree.
public void FindMinimumSpanningTree()
{
    SortedSet<EdgeWeighted> priority = new SortedSet<EdgeWeighted>();
    bool[] usedEdges = new bool[this.n + 1];
    List<EdgeWeighted> mpdEdges = new List<EdgeWeighted>();
    // adding edges that connect the node 1 with all the others - 2, 3, 4 ...
    foreach (EdgeWeighted edge in this.edges)
    {
        if (edge.StartNode == this.edges[0].StartNode)
        {
            priority.Add(edge);
        }
    }
    usedEdges[this.edges[0].StartNode] = true;
    while (priority.Count > 0)
    {
        // Cheapest edge currently crossing the frontier.
        EdgeWeighted edge = priority.Min;
        priority.Remove(edge);
        if (!usedEdges[edge.EndNode])
        {
            usedEdges[edge.EndNode] = true; // we "visit" this node
            mpdEdges.Add(edge);
            this.AddEdges(edge, this.edges, mpdEdges, priority, usedEdges);
        }
    }
    this.PrintMinimumSpanningTree(mpdEdges);
}
// Splits the input line into words and prints, sorted and comma-separated,
// the distinct words that read the same forwards and backwards.
static void Main()
{
    char[] delimiters = { ' ', ',', '.', '?', '!' };
    string[] words = Console.ReadLine().Split(delimiters, StringSplitOptions.RemoveEmptyEntries);
    SortedSet<string> palindromes = new SortedSet<string>();
    foreach (string word in words)
    {
        // A word is a palindrome iff it equals its own reverse. This is equivalent
        // to the original half-vs-reversed-half comparison (including the
        // middle-character skip for odd lengths and the 1-char special case).
        char[] reversed = word.ToCharArray();
        Array.Reverse(reversed);
        if (word == new string(reversed))
        {
            palindromes.Add(word);
        }
    }
    Console.WriteLine(string.Join(", ", palindromes));
}
// Reads "city;club;performer" lines until "END" and merges each record into schedule,
// creating the per-city dictionary and per-club set on first use.
static void FillDictionary(Dictionary<string, SortedDictionary<string, SortedSet<string>>> schedule)
{
    string input = Console.ReadLine();
    while (input != "END")
    {
        string[] splitInput = input.Split(';');
        string city = splitInput[0];
        string club = splitInput[1];
        string performer = splitInput[2];

        // TryGetValue replaces the LINQ Keys.Contains + indexer double lookups,
        // and the inner containers are only allocated when actually needed.
        SortedDictionary<string, SortedSet<string>> clubs;
        if (!schedule.TryGetValue(city, out clubs))
        {
            clubs = new SortedDictionary<string, SortedSet<string>>();
            schedule.Add(city, clubs);
        }
        SortedSet<string> performers;
        if (!clubs.TryGetValue(club, out performers))
        {
            performers = new SortedSet<string>();
            clubs.Add(club, performers);
        }
        performers.Add(performer);

        input = Console.ReadLine();
    }
}
// Builds a renderable object from a parsed ODF file: sets up the highlight material,
// texture/material tables, the frame hierarchy for the selected meshes, an animation
// controller, and the merged bounding box of all mesh frames.
public RenderObjectODF(odfParser parser, HashSet<int> meshIDs)
{
    HighlightSubmesh = new SortedSet<int>();
    highlightMaterial = new Material();
    highlightMaterial.Ambient = new Color4(1, 1, 1, 1);
    highlightMaterial.Diffuse = new Color4(1, 0, 1, 0);
    this.device = Gui.Renderer.Device;
    // TextureSection may be absent; size the texture tables to 0 in that case.
    Textures = new Texture[parser.TextureSection != null ? parser.TextureSection.Count : 0];
    TextureDic = new Dictionary<int, int>(parser.TextureSection != null ? parser.TextureSection.Count : 0);
    Materials = new Material[parser.MaterialSection.Count];
    BoneMatrixDic = new Dictionary<string, Matrix>();
    rootFrame = CreateHierarchy(parser, meshIDs, device, out meshFrames);
    AnimationController = new AnimationController(numFrames, 30, 30, 1);
    Frame.RegisterNamedMatrices(rootFrame, AnimationController);
    // Merge the bounds of every mesh frame into one overall bounding box.
    for (int i = 0; i < meshFrames.Count; i++)
    {
        if (i == 0)
        {
            Bounds = meshFrames[i].Bounds;
        }
        else
        {
            Bounds = BoundingBox.Merge(Bounds, meshFrames[i].Bounds);
        }
    }
}
// Candidate generation (Apriori join step, cf. "Introduction to Data Mining" p.210):
// two frequent k-itemsets whose first k-1 (sorted) items match are merged into one
// (k+1)-candidate itemset.
static List<SortedSet<string>> AprioriGen(List<SortedSet<string>> kFequentSet)
{
    List<SortedSet<string>> result = new List<SortedSet<string>>();
    for (int i = 0; i < kFequentSet.Count; i++)
    {
        // Prefix of itemset i: everything except its last (largest) element.
        SortedSet<string> aPrefix = new SortedSet<string>(kFequentSet[i]);
        string aLast = kFequentSet[i].Last<string>();
        aPrefix.Remove(aLast);
        for (int j = i + 1; j < kFequentSet.Count; j++)
        {
            SortedSet<string> bPrefix = new SortedSet<string>(kFequentSet[j]);
            string bLast = kFequentSet[j].Last<string>();
            bPrefix.Remove(bLast);
            // Join condition: identical k-1 prefixes but different last elements.
            // SetEquals replaces the original Count-compare + ExceptWith dance.
            if (aPrefix.SetEquals(bPrefix) && !aLast.Equals(bLast))
            {
                SortedSet<string> candidate = new SortedSet<string>(kFequentSet[i]);
                candidate.Add(bLast);
                result.Add(candidate);
            }
        }
    }
    return result;
}
// Returns the authenticated user's contact list as JSON ordered by user id;
// responds 401 when the notification ping fails.
// NOTE(review): 'descripton' is the actual property name on the response/user types
// visible here — renaming it would be an interface change, not a typo fix.
public ActionResult GetContactList()
{
    if (!PingNotif())
    {
        return new HttpStatusCodeResult(HttpStatusCode.Unauthorized);
    }
    logger.LogActionEnter(Session.SessionID, "/Service/GetContactList");
    SortedSet<GetContactListResponse_User> list = new SortedSet<GetContactListResponse_User>();
    User user = UserManager.GetUser(System.Web.HttpContext.Current.User.Identity.Name);
    // Contact list maps nickname -> User.
    Dictionary<string, User> contact_list = UserManager.GetContactList(user);
    foreach (KeyValuePair<string, User> u in contact_list)
    {
        list.Add(new GetContactListResponse_User { user_id = u.Value.user_id, login = u.Value.login, nickname = u.Key, status = (int)u.Value.status, description = u.Value.descripton });
    }
    logger.LogActionLeave(Session.SessionID, "/Service/GetContactList", list.Count + " contacts sent");
    return Json(list.OrderBy(u => u.user_id));
}
public static void Main()
{
    Console.WriteLine("A TV company needs to lay cables to a new neighborhood (for every house).");
    Console.WriteLine("Some of the paths are longer. Find a way to minimize the cost for cables.\n");

    var allEdges = new List<Edge>();
    InitializeGraph(allEdges);

    const int numberOfNodes = 8;
    var reached = new bool[numberOfNodes + 1];
    var treeEdges = new List<Edge>();
    var frontier = new SortedSet<Edge>();

    Console.WriteLine("The paths from house to house are:");
    // Print every path; seed the frontier with the edges leaving the start house.
    int startHouse = allEdges[0].StartNode;
    foreach (var path in allEdges)
    {
        Console.WriteLine("{0}", path);
        if (path.StartNode == startHouse)
        {
            frontier.Add(path);
        }
    }
    reached[startHouse] = true;

    FindMinimumSpanningTree(reached, frontier, treeEdges, allEdges);
    PrintMinimumSpanningTree(treeEdges);
}
// Rebuilds a namespace tree containing the forward references found in the given CLI
// type references. Skipped: blank references, declarations not inside a Namespace,
// duplicate forward-reference texts, and references already emitted in the header.
public Namespace ConvertForwardReferencesToNamespaces(
    IEnumerable<CLITypeReference> typeReferences)
{
    // Create a new tree of namespaces out of the type references found.
    var rootNamespace = new TranslationUnit();
    rootNamespace.Module = TranslationUnit.Module;
    // Deterministic output order: sort by the forward-reference text (ordinal).
    var sortedRefs = typeReferences.ToList();
    sortedRefs.Sort((ref1, ref2) => string.CompareOrdinal(ref1.FowardReference, ref2.FowardReference));
    var forwardRefs = new SortedSet<string>();
    foreach (var typeRef in sortedRefs)
    {
        if (string.IsNullOrWhiteSpace(typeRef.FowardReference))
            continue;
        var declaration = typeRef.Declaration;
        if (!(declaration.Namespace is Namespace))
            continue;
        // Add returns false for duplicates — each forward reference is emitted once.
        if (!forwardRefs.Add(typeRef.FowardReference))
            continue;
        if (typeRef.Include.InHeader)
            continue;
        var @namespace = FindCreateNamespace(rootNamespace, declaration);
        @namespace.TypeReferences.Add(typeRef);
    }
    return rootNamespace;
}
public static void TestCopyConstructor()
{
    // Build a source set and a parallel list holding the same 10000 distinct items.
    var sourceSet = new SortedSet<int>();
    var sourceList = new List<int>();
    for (int value = 0; value < 10000; value++)
    {
        if (!sourceSet.Contains(value))
        {
            sourceSet.Add(value);
            sourceList.Add(value);
        }
    }

    // Copy-constructing from an IEnumerable and from another SortedSet must both
    // preserve contents and count.
    var copiedFromList = new SortedSet<int>(sourceList);
    Assert.True(copiedFromList.SetEquals(sourceList)); //"Expected to be the same set."
    var copiedFromSet = new SortedSet<int>(sourceSet);
    Assert.True(sourceSet.SetEquals(copiedFromSet)); //"Expected to be the same set."
    Assert.Equal(sourceSet.Count, copiedFromList.Count); //"Should be equal."
    Assert.Equal(sourceSet.Count, copiedFromSet.Count); //"Copied tree not the same as base"
}
/// <inheritdoc />
// Writes a git "smart HTTP" ref advertisement: the service banner, then one line per
// distinct advertised object id under refs/anonymous/<id> (capabilities appended to
// the first line), or a zero-id capabilities^{} line when the repository has no refs.
public override void ExecuteResult(ControllerContext context)
{
    var response = context.HttpContext.Response;
    response.StatusCode = 200;
    response.ContentType = "application/x-" + this.service + "-advertisement";
    response.BinaryWrite(ProtocolUtils.PacketLine("# service=" + this.service + "\n"));
    response.BinaryWrite(ProtocolUtils.EndMarker);
    // Distinct, sorted target ids of all refs.
    var ids = new SortedSet<string>(this.repo.Refs.Select(r => r.TargetIdentifier));
    var first = true;
    foreach (var id in ids)
    {
        // Capabilities ride on the first advertised ref only (after a NUL byte).
        var line = first ? string.Format("{0} refs/anonymous/{0}\0{1}\n", id, this.GetCapabilities()) : string.Format("{0} refs/anonymous/{0}\n", id);
        response.BinaryWrite(ProtocolUtils.PacketLine(line));
        first = false;
    }
    if (first)
    {
        // No refs at all: advertise capabilities on a zero-id placeholder line.
        var line = string.Format("{0} capabilities^{{}}\0{1}\n", ProtocolUtils.ZeroId, this.GetCapabilities());
        response.BinaryWrite(ProtocolUtils.PacketLine(line));
    }
    response.BinaryWrite(ProtocolUtils.EndMarker);
    response.End();
}
static void Main()
{
    // Scan the matrix for empty cells; each one seeds a flood fill whose size is
    // accumulated in areaSize by GetConnectedAreaSize.
    var foundAreas = new SortedSet<Area>();
    int rows = matrix.GetLength(0);
    int cols = matrix.GetLength(1);
    for (int row = 0; row < rows; row++)
    {
        for (int col = 0; col < cols; col++)
        {
            if (matrix[row, col] != ' ')
            {
                continue;
            }
            GetConnectedAreaSize(row, col);
            foundAreas.Add(new Area(row, col, areaSize));
            areaSize = 0;
        }
    }

    if (foundAreas.Any())
    {
        Console.WriteLine("Total areas found: {0}", foundAreas.Count);
        int number = 0;
        foreach (var area in foundAreas)
        {
            ++number;
            Console.WriteLine("Area #{0} at {1}", number, area.ToString());
        }
    }
}
// Merges one (city, venue, performer) record into _program, creating the per-city
// dictionary and per-venue set on first use. Adding an existing performer to a set
// is a no-op, so no duplicate check is needed.
private static void AddRecord(String city, String venue, String performer)
{
    // TryGetValue get-or-create collapses the original duplicated branches
    // and avoids the ContainsKey + indexer double lookups.
    SortedDictionary<String, SortedSet<String>> venues;
    if (!_program.TryGetValue(city, out venues))
    {
        venues = new SortedDictionary<String, SortedSet<String>>();
        _program.Add(city, venues);
    }
    SortedSet<String> performers;
    if (!venues.TryGetValue(venue, out performers))
    {
        performers = new SortedSet<String>();
        venues.Add(venue, performers);
    }
    performers.Add(performer);
}
// Loads the colorizer's three word lists (types, headings, attributes) from the
// PFF highlight-word definition file; the blocks are read in this fixed file order.
public PffColorizer()
{
    HighlightWordReader hwr = new HighlightWordReader(HighlightWordReader.PffFilename);
    _typeWords = hwr.ReadWordBlock(true);
    _headingWords = hwr.ReadWordBlock(true);
    _attributeWords = hwr.ReadWordBlock(true);
}
public static void Main(string[] args)
{
    // Group students from "first | last | course" lines by course name,
    // then print each course with its (sorted) students.
    SortedDictionary<string, SortedSet<Fullname>> courseDictionary = new SortedDictionary<string, SortedSet<Fullname>>();
    using (FileStream fs = new FileStream("students.txt", FileMode.Open))
    {
        StreamReader reader = new StreamReader(fs);
        while (!reader.EndOfStream)
        {
            string[] parts = reader.ReadLine().Split('|').Select(x => x.Trim()).ToArray();
            string courseName = parts[2];
            SortedSet<Fullname> students;
            if (!courseDictionary.TryGetValue(courseName, out students))
            {
                students = new SortedSet<Fullname>();
                courseDictionary.Add(courseName, students);
            }
            students.Add(new Fullname() { FirstName = parts[0], LastName = parts[1] });
        }
    }
    foreach (var course in courseDictionary)
    {
        Console.WriteLine("{0}:{1}", course.Key, string.Join(",", course.Value));
    }
}
// Creates a library whose books are kept sorted by the ordering defined
// in BookComparator.
public Library(params Book[] books)
{
    this.books = new SortedSet<Book>(books, new BookComparator());
}
public void IntersectWith_Null()
{
    // Intersecting with a null collection is expected to throw
    // (ArgumentNullException per the SortedSet<T> contract).
    var sortedSet = new SortedSet<int>();
    sortedSet.IntersectWith(null);
}
// Builds the sub-command with its option set and initializes the (empty) argument-path
// list plus the demo-file set, ordered by a natural alphanumeric file comparer.
public DemoParserSubCommand(IImmutableList <BaseOption <DemoParsingSetupInfo, DemoParsingInfo> > options) : base(options)
{
    _argPaths = new List <FileSystemInfo>();
    _demoPaths = new SortedSet <FileInfo>(new AlphanumComparatorFileInfo());
}
public void CtorDefault()
{
    // A parameterless SortedSet must still expose a non-null (default) comparer.
    var sortedSet = new SortedSet<int>();
    Assert.IsNotNull(sortedSet.Comparer);
}
public void SymmetricExceptWith_Null()
{
    // Symmetric-difference with a null collection is expected to throw
    // (ArgumentNullException per the SortedSet<T> contract).
    var sortedSet = new SortedSet<int>();
    sortedSet.SymmetricExceptWith(null);
}
public void ExceptWith_Null()
{
    // Except with a null collection is expected to throw
    // (ArgumentNullException per the SortedSet<T> contract).
    var sortedSet = new SortedSet<int>();
    sortedSet.ExceptWith(null);
}
public void CtorNullComparer()
{
    // Passing a null comparer must silently fall back to Comparer<int>.Default.
    var sortedSet = new SortedSet<int>((IComparer<int>)null);
    Assert.AreEqual(Comparer<int>.Default, sortedSet.Comparer);
}
public void UnionWith_Null()
{
    // Union with a null collection is expected to throw
    // (ArgumentNullException per the SortedSet<T> contract).
    var sortedSet = new SortedSet<int>();
    sortedSet.UnionWith(null);
}
// Breadth-first branch-and-bound over shape-constraint space: keeps a frontier
// ("front") of constraint sets ordered by lower bound, repeatedly splits the most
// promising one, and stops when the best element satisfies the freedom thresholds
// or a stop is requested. Returns the final frontier.
private SortedSet<EnergyBound> BreadthFirstBranchAndBoundTraverse(ShapeConstraints constraints)
{
    SortedSet<EnergyBound> front = new SortedSet<EnergyBound> { this.CalculateEnergyBound(constraints) };
    int currentIteration = 1;
    DateTime lastOutputTime = startTime;
    int processedConstraintSets = 0;
    while (!front.Min.Constraints.CheckIfSatisfied(this.maxCoordFreedom, this.maxWidthFreedom) && !this.IsStopping)
    {
        this.WaitIfPaused();
        // Expand the frontier element with the smallest lower bound.
        EnergyBound parentLowerBound = front.Min;
        front.Remove(parentLowerBound);
        List<ShapeConstraints> expandedConstraints = parentLowerBound.Constraints.SplitMostFree(this.maxCoordFreedom, this.maxWidthFreedom);
        foreach (ShapeConstraints constraintsSet in expandedConstraints)
        {
            EnergyBound lowerBound = this.CalculateEnergyBound(constraintsSet);
            front.Add(lowerBound);
            // Uncomment for strong invariants check
            //ObjectBackgroundTerm[,] lowerBoundShapeTerm = new ObjectBackgroundTerm[this.segmentedImage.Width, this.segmentedImage.Height];
            //for (int i = 0; i < this.segmentedImage.Width; ++i)
            //    for (int j = 0; j < this.segmentedImage.Height; ++j)
            //        lowerBoundShapeTerm[i, j] = CpuBranchAndBoundShapeTermsCalculator.CalculateShapeTerm(lowerBound.Constraints, new Point(i, j));
            //ObjectBackgroundTerm[,] parentLowerBoundShapeTerm = new ObjectBackgroundTerm[this.segmentedImage.Width, this.segmentedImage.Height];
            //for (int i = 0; i < this.segmentedImage.Width; ++i)
            //    for (int j = 0; j < this.segmentedImage.Height; ++j)
            //        parentLowerBoundShapeTerm[i, j] = CpuBranchAndBoundShapeTermsCalculator.CalculateShapeTerm(parentLowerBound.Constraints, new Point(i, j));
            //for (int i = 0; i < this.segmentedImage.Width; ++i)
            //    for (int j = 0; j < this.segmentedImage.Height; ++j)
            //    {
            //        Debug.Assert(lowerBoundShapeTerm[i, j].ObjectTerm >= parentLowerBoundShapeTerm[i, j].ObjectTerm - 1e-7);
            //        Debug.Assert(lowerBoundShapeTerm[i, j].BackgroundTerm >= parentLowerBoundShapeTerm[i, j].BackgroundTerm - 1e-7);
            //        //CalculateShapeTerm(lowerBound.Constraints, new Point(0, 67));
            //        //CalculateShapeTerm(parentLowerBound.Constraints, new Point(0, 67));
            //    }
            // Lower bound should not decrease (check always, it's important!)
            Trace.Assert(lowerBound.SegmentationEnergy >= parentLowerBound.SegmentationEnergy - 1e-6);
            Trace.Assert(lowerBound.ShapeEnergy >= parentLowerBound.ShapeEnergy - 1e-6);
            //this.CalculateEnergyBound(lowerBound.Constraints);
            //this.CalculateEnergyBound(parentLowerBound.Constraints);
            ++processedConstraintSets;
        }
        // Some debug output
        if (currentIteration % this.ProgressReportRate == 0)
        {
            DateTime currentTime = DateTime.Now;
            EnergyBound currentMin = front.Min;
            DebugConfiguration.WriteDebugText(
                "On iteration {0} front contains {1} constraint sets.",
                currentIteration,
                front.Count);
            DebugConfiguration.WriteDebugText(
                "Current lower bound is {0:0.0000} ({1:0.0000} + {2:0.0000}).",
                currentMin.Bound,
                currentMin.SegmentationEnergy,
                currentMin.ShapeEnergy * this.ShapeEnergyWeight);
            double processingSpeed = processedConstraintSets / (currentTime - lastOutputTime).TotalSeconds;
            DebugConfiguration.WriteDebugText("Processing speed is {0:0.000} items per sec", processingSpeed);
            double maxVertexConstraintsFreedom = currentMin.Constraints.VertexConstraints.Max(c => c.Freedom);
            double maxEdgeConstraintsFreedom = currentMin.Constraints.EdgeConstraints.Max(c => c.Freedom);
            DebugConfiguration.WriteDebugText(
                "Max vertex freedom: {0:0.00}, max edge freedom: {1:0.00}",
                maxVertexConstraintsFreedom,
                maxEdgeConstraintsFreedom);
            DebugConfiguration.WriteDebugText("Elapsed time: {0}", DateTime.Now - this.startTime);
            DebugConfiguration.WriteDebugText();
            this.ReportBranchAndBoundProgress(front);
            lastOutputTime = currentTime;
            processedConstraintSets = 0;
        }
        currentIteration += 1;
    }
    return(front);
}
// Null-tolerant equality of two sorted sets, delegating to
// SortedSet<T>.SortedSetEquals with the comparer this equality comparer was built with.
public bool Equals(SortedSet <T>?x, SortedSet <T>?y) => SortedSet <T> .SortedSetEquals(x, y, _comparer);
// Builds the dtNavMesh for mapID: derives the navmesh parameters (tile size, origin,
// tile/poly limits) from the map's tile list — or its parent map's, if one exists —
// initializes the mesh, and writes the header values to "mmaps/XXXX.mmap".
void buildNavMesh(uint mapID, out dtNavMesh navMesh)
{
    // if map has a parent we use that to generate dtNavMeshParams - worldserver will load all missing tiles from that map
    int navMeshParamsMapId = _vmapManager.GetParentMapId(mapID);
    if (navMeshParamsMapId == -1)
    {
        navMeshParamsMapId = (int)mapID;
    }
    SortedSet<uint> tiles = getTileList((uint)navMeshParamsMapId);
    // old code for non-statically assigned bitmask sizes:
    ///*** calculate number of bits needed to store tiles & polys ***/
    //int tileBits = dtIlog2(dtNextPow2(tiles.size()));
    //if (tileBits < 1) tileBits = 1; // need at least one bit!
    //int polyBits = sizeof(dtPolyRef)*8 - SALT_MIN_BITS - tileBits;
    int polyBits = SharedConst.DT_POLY_BITS;
    int maxTiles = tiles.Count;
    int maxPolysPerTile = 1 << polyBits;
    /*** calculate bounds of map ***/
    uint tileXMin = 64, tileYMin = 64, tileXMax = 0, tileYMax = 0;
    foreach (var it in tiles)
    {
        StaticMapTree.UnpackTileID(it, out uint tileX, out uint tileY);
        if (tileX > tileXMax)
        {
            tileXMax = tileX;
        }
        else if (tileX < tileXMin)
        {
            tileXMin = tileX;
        }
        if (tileY > tileYMax)
        {
            tileYMax = tileY;
        }
        else if (tileY < tileYMin)
        {
            tileYMin = tileY;
        }
    }
    // use Max because '32 - tileX' is negative for values over 32
    float[] bmin;
    float[] bmax;
    getTileBounds(tileXMax, tileYMax, null, 0, out bmin, out bmax);
    /*** now create the navmesh ***/
    // navmesh creation params
    dtNavMeshParams navMeshParams = new dtNavMeshParams();
    navMeshParams.tileWidth = SharedConst.GRID_SIZE;
    navMeshParams.tileHeight = SharedConst.GRID_SIZE;
    rcVcopy(navMeshParams.orig, bmin);
    navMeshParams.maxTiles = maxTiles;
    navMeshParams.maxPolys = maxPolysPerTile;
    navMesh = new dtNavMesh();
    if (dtStatusFailed(navMesh.init(navMeshParams)))
    {
        Console.WriteLine($"[Map: {mapID:D4}] Failed creating navmesh!");
        return;
    }
    string fileName = $"mmaps/{mapID:D4}.mmap";
    using (BinaryWriter writer = new BinaryWriter(File.Open(fileName, FileMode.Create, FileAccess.Write)))
    {
        // now that we know navMesh params are valid, we can write them to file
        writer.Write(bmin[0]);
        writer.Write(bmin[1]);
        writer.Write(bmin[2]);
        writer.Write(SharedConst.GRID_SIZE);
        writer.Write(SharedConst.GRID_SIZE);
        writer.Write(maxTiles);
        writer.Write(maxPolysPerTile);
    }
}
// Runs breadth-first branch-and-bound segmentation on the current image: validates
// edge-width settings and any caller-supplied start constraints, builds default
// constraints from the image bounds when none were given, traverses the constraint
// space, then collapses the best frontier element into the final shape/mask solution.
protected override SegmentationSolution SegmentCurrentImage()
{
    if (this.minEdgeWidth >= this.maxEdgeWidth)
    {
        throw new InvalidOperationException("Min edge width should be less than max edge width.");
    }
    if (this.startConstraints != null)
    {
        if (this.startConstraints.ShapeStructure != this.ShapeModel.Structure)
        {
            throw new InvalidOperationException("Given start constraints have shape structure different from the one specified in shape model.");
        }
        // TODO: make this check work
        //foreach (VertexConstraints vertexConstraints in this.startConstraints.VertexConstraints)
        //{
        //    RectangleF imageRectangle =
        //        new RectangleF(0, 0, this.ImageSegmentator.ImageSize.Width, this.ImageSegmentator.ImageSize.Height);
        //    if (!imageRectangle.Contains(vertexConstraints.CoordRectangle))
        //        throw new InvalidOperationException("Given start constraints are not fully inside the segmented image.");
        //}
    }
    this.shapeUnaryTerms = new Image2D<ObjectBackgroundTerm>(
        this.ImageSegmentator.ImageSize.Width, this.ImageSegmentator.ImageSize.Height);
    // Default constraints: the whole image rectangle with the configured width range.
    ShapeConstraints constraints = this.startConstraints;
    if (constraints == null)
    {
        constraints = ShapeConstraints.CreateFromBounds(
            this.ShapeModel.Structure,
            Vector.Zero,
            new Vector(this.ImageSegmentator.ImageSize.Width, this.ImageSegmentator.ImageSize.Height),
            this.minEdgeWidth,
            this.maxEdgeWidth);
    }
    if (this.BranchAndBoundStarted != null)
    {
        this.BranchAndBoundStarted(this, EventArgs.Empty);
    }
    this.startTime = DateTime.Now;
    DebugConfiguration.WriteImportantDebugText("Breadth-first branch-and-bound started.");
    SortedSet<EnergyBound> front = this.BreadthFirstBranchAndBoundTraverse(constraints);
    ReportBranchAndBoundCompletion(front.Min);
    if (front.Min.Constraints.CheckIfSatisfied(this.maxCoordFreedom, this.maxWidthFreedom))
    {
        DebugConfiguration.WriteImportantDebugText("Breadth-first branch-and-bound finished in {0}.", DateTime.Now - this.startTime);
        DebugConfiguration.WriteImportantDebugText("Best lower bound is {0:0.0000}", front.Min.Bound);
    }
    else
    {
        // Stopped early (forced stop): report what was achieved instead.
        DebugConfiguration.WriteImportantDebugText("Breadth-first branch-and-bound forced to stop after {0}.", DateTime.Now - this.startTime);
        DebugConfiguration.WriteImportantDebugText("Min energy value achieved is {0:0.0000}", front.Min.Bound);
    }
    EnergyBound collapsedBfsSolution = this.CalculateEnergyBound(front.Min.Constraints.Collapse());
    Shape resultShape = front.Min.Constraints.CollapseToShape();
    DebugConfiguration.WriteImportantDebugText(
        "Collapsed solution energy value is {0:0.0000} ({1:0.0000} + {2:0.0000})",
        collapsedBfsSolution.Bound,
        collapsedBfsSolution.SegmentationEnergy,
        collapsedBfsSolution.ShapeEnergy * this.ShapeEnergyWeight);
    return(new SegmentationSolution(resultShape, this.ImageSegmentator.GetLastSegmentationMask(), collapsedBfsSolution.Bound));
}
// Generates the source text of a "MapTo{Destination}" extension class for the given
// source/destination types: emits usings, an optional namespace, a static partial
// extension class, and a MapTo method that copies each mapped property (with a null
// guard expression when the source is a reference type).
private static SourceText Build(ITypeSymbol source, ITypeSymbol destination, ImmutableArray <string> maps, ConfigurationValues configurationValues)
{
    using var writer = new StringWriter();
    using var indentWriter = new IndentedTextWriter(writer, configurationValues.IndentStyle == IndentStyle.Tab ? "\t" : new string (' ', (int)configurationValues.IndentSize));
    var usingStatements = new SortedSet <string>();
    // Reference-type sources need System for ArgumentNullException.
    if (!source.IsValueType)
    {
        usingStatements.Add("using System;");
    }
    ;
    // Import the destination's namespace unless it is global or a prefix of the source's.
    if (!destination.ContainingNamespace.IsGlobalNamespace && !source.ContainingNamespace.ToDisplayString().StartsWith(
            destination.ContainingNamespace.ToDisplayString(), StringComparison.InvariantCulture))
    {
        usingStatements.Add($"using {destination.ContainingNamespace.ToDisplayString()};");
    }
    foreach (var usingStatement in usingStatements)
    {
        indentWriter.WriteLine(usingStatement);
    }
    if (usingStatements.Count > 0)
    {
        indentWriter.WriteLine();
    }
    if (!source.ContainingNamespace.IsGlobalNamespace)
    {
        indentWriter.WriteLine($"namespace {source.ContainingNamespace.ToDisplayString()}");
        indentWriter.WriteLine("{");
        indentWriter.Indent++;
    }
    indentWriter.WriteLine($"public static partial class {source.Name}MapToExtensions");
    indentWriter.WriteLine("{");
    indentWriter.Indent++;
    indentWriter.WriteLine($"public static {destination.Name} MapTo{destination.Name}(this {source.Name} self) =>");
    indentWriter.Indent++;
    if (!source.IsValueType)
    {
        indentWriter.WriteLine("self is null ? throw new ArgumentNullException(nameof(self)) :");
        indentWriter.Indent++;
    }
    indentWriter.WriteLine($"new {destination.Name}");
    indentWriter.WriteLine("{");
    indentWriter.Indent++;
    // One generated property assignment per precomputed map line.
    foreach (var map in maps)
    {
        indentWriter.WriteLine(map);
    }
    indentWriter.Indent--;
    indentWriter.WriteLine("};");
    if (!source.IsValueType)
    {
        indentWriter.Indent--;
    }
    indentWriter.Indent--;
    indentWriter.Indent--;
    indentWriter.WriteLine("}");
    if (!source.ContainingNamespace.IsGlobalNamespace)
    {
        indentWriter.Indent--;
        indentWriter.WriteLine("}");
    }
    return(SourceText.From(writer.ToString(), Encoding.UTF8));
}
/// <summary>
/// Creates a tile container for the given map.
/// </summary>
/// <param name="id">Identifier of the map these tiles belong to.</param>
/// <param name="tiles">Sorted set of tile identifiers for the map.</param>
public MapTiles(uint id, SortedSet<uint> tiles)
{
    m_tiles = tiles;
    m_mapId = id;
}
/// <summary>
/// Parses the set expression in the input box, evaluates it with a postfix stack,
/// and shows intermediate and final results.
/// </summary>
private void parseButton_Click(object sender, EventArgs e)
{
    string str = textDisplay.Text;
    Console.WriteLine(str);

    Token[] tokens = Tokenizer.Tokenize(str);
    Set[] sets = Parser.GenerateSets(tokens.Length);
    SortedSet<char> sorted = Parser.GetSetNames(tokens);
    Dictionary<char, Set> dict = Parser.FillDictionary(sorted, sets);
    List<Set> setList = new List<Set>();

    // Parser.Parse yields the expression in postfix (RPN) order.
    string parsed = Parser.Parse(tokens);
    textBox1.Text = parsed;

    // Echo each named set for debugging.
    foreach (char c in dict.Keys)
    {
        Console.WriteLine(c);
        DisplayBinary(dict[c]);
        Console.WriteLine();
    }

    // Standard postfix evaluation: operands push, operators pop and push their result.
    for (int i = 0; i < parsed.Length; i++)
    {
        char c = parsed[i];
        if (Tokenizer.IsSet(c))
        {
            setList.Add(dict[c]);
            continue;
        }
        switch (c)
        {
            case Sign.Union:        ApplyBinaryOperator(setList, UnionSet); break;
            case Sign.Complement:   ApplyUnaryOperator(setList, Falsify); break;
            case Sign.Equal:        ApplyBinaryOperator(setList, EqualitySet); break;
            case Sign.Intersection: ApplyBinaryOperator(setList, IntersectionSet); break;
            case Sign.Difference:   ApplyBinaryOperator(setList, DiffSet); break;
            case Sign.Implication:  ApplyBinaryOperator(setList, ImplicationSet); break;
            case Sign.Inclusion:    ApplyBinaryOperator(setList, InclusionSet); break;
        }
    }

    // The last value on the stack is the result of the whole expression.
    // (Previously assigned on every loop iteration; only the final value is visible.)
    if (setList.Count > 0)
    {
        textBox2.Text = IsTrue(setList[setList.Count - 1]).ToString();
    }
}

// Pops the two topmost operands, pushes op(left, right), and echoes the result.
// (Replaces six copies of the identical remove/add choreography.)
private void ApplyBinaryOperator(List<Set> setList, Func<Set, Set, Set> op)
{
    Set result = op(setList[setList.Count - 2], setList[setList.Count - 1]);
    setList.Add(result);
    DisplayBinary(result);
    setList.RemoveAt(setList.Count - 3); // left operand
    setList.RemoveAt(setList.Count - 2); // right operand
}

// Pops the topmost operand, pushes op(operand), and echoes the result.
private void ApplyUnaryOperator(List<Set> setList, Func<Set, Set> op)
{
    Set result = op(setList[setList.Count - 1]);
    setList.Add(result);
    DisplayBinary(result);
    setList.RemoveAt(setList.Count - 2); // consumed operand
}
/// <summary>
/// Java-style tailSet: returns the live view of all elements >= <paramref name="value"/>.
/// </summary>
/// <param name="set">The set to take a view of.</param>
/// <param name="value">Inclusive lower bound of the view.</param>
/// <returns>A view of <paramref name="set"/> restricted to elements >= <paramref name="value"/>.</returns>
internal static SortedSet<int> TailSet(this SortedSet<int> set, int value)
{
    // Use int.MaxValue rather than the previous magic upper bound (9999999), which
    // silently excluded larger elements and threw ArgumentException for value > 9999999.
    return set.GetViewBetween(value, int.MaxValue);
}
// Reads a bipartite people/tasks assignment problem from stdin, solves it as max flow
// (source -> people -> tasks -> sink, all capacities 1), then walks the residual graph
// backwards from the sink to recover which person got which task.
static void Main(string[] args)
{
    // The counts are the last token of each of the first two input lines.
    int people = int.Parse(Console.ReadLine().Split().Last());
    int tasks = int.Parse(Console.ReadLine().Split().Last());

    // Node layout: 0 = source, 1..people = people, people+1..people+tasks = tasks, last = sink.
    int nodes = people + tasks + 2;
    graph = new int[nodes][];
    for (int i = 0; i < graph.Length; i++)
    {
        graph[i] = new int[nodes];
    }
    // Source feeds every person with capacity 1.
    for (int i = 0; i < people; i++)
    {
        graph[0][i + 1] = 1;
    }
    // Every task drains into the sink with capacity 1.
    for (int i = 0; i < tasks; i++)
    {
        graph[i + people + 1][graph.Length - 1] = 1;
    }
    // 'Y' at row i, column j means person i can do task j.
    for (int i = 0; i < people; i++)
    {
        var line = Console.ReadLine();
        for (int j = 0; j < tasks; j++)
        {
            if (line[j] == 'Y')
            {
                graph[i + 1][j + people + 1] = 1;
            }
        }
    }

    FindMaxFlow();

    // BFS from the sink over the remaining capacities: an edge task -> person that is
    // still positive was reversed by the flow and identifies a matched pair.
    var queue = new Queue<int>();
    var result = new SortedSet<string>();
    var visited = new bool[graph.Length];
    int start = 0;
    int end = graph.Length - 1;
    queue.Enqueue(end);
    while (queue.Count > 0)
    {
        var node = queue.Dequeue();
        visited[node] = true;
        for (int i = 0; i < graph.Length; i++)
        {
            if (graph[node][i] > 0 && !visited[i])
            {
                queue.Enqueue(i);
                visited[i] = true;
                // Interior edge: 'node' is a task node, 'i' a person node — formatted as
                // "<person letter>-<task number>". (Assumes people are lettered from 'A' —
                // TODO confirm against the expected output format.)
                if (node != end && node != start && i != end && i != start)
                {
                    result.Add($"{(char)(i - 1 + 'A')}-{node - people}");
                }
            }
        }
    }
    Console.WriteLine(String.Join(Environment.NewLine, result));
}
// Update is called once per frame
// Polls all colliders within maxSenseRange and refreshes the seen/heard/smelled sets,
// the remembered-object cache, and the nearest-sensed-object tracking. Runs only every
// `frequency` frames unless adrenaline forces a refresh each frame.
void Update()
{
    // Throttle: skip frames until the counter reaches `frequency` (unless adrenaline).
    if (!HasAdrenaline && frame++ < frequency)
    {
        return;
    }
    frame = 0;

    float nearest = float.MaxValue;
    var objectsWithinSenses = Physics.OverlapSphere(transform.position, maxSenseRange, targetMask);
    seenObjects.Clear();
    heardObjects.Clear();
    smelledObjects.Clear();
    // TODO: We may want to think about creating a new set each frame. This was just updating
    // the set, but I was getting a concurrent modification exception from AnimalUtilityData since
    // it is operating in a coroutine instead of on the main thread. Let's discuss how to properly
    // optimize this. We could potentially go with a static list and create a copy for consumers
    // that request the list when they want it. We can also optimize consumers of the list to use
    // the OnNewObjectsSensed listener for updates
    var sensedObjects = new SortedSet<SensedObject>(); // NOTE(review): never used below — dead local?
    var newlySensedObjects = new HashSet<SensedObject>();
    float detectionTime = Time.fixedTime;

    for (int i = 0; i < objectsWithinSenses.Length; i++)
    {
        Collider collider = objectsWithinSenses[i];
        SensedObject sensedObject;
        // Never sense ourselves.
        if (collider.gameObject == gameObject)
        {
            continue;
        }
        // Optional external veto on what counts as sensable.
        if (null != sensedObjectValidationListener && !sensedObjectValidationListener(collider.gameObject))
        {
            continue;
        }
        // Look up (or create and remember) the tracking record for this object.
        if (!rememberedObjects.TryGetValue(collider.gameObject, out sensedObject))
        {
            sensedObject = new SensedObject(collider.gameObject);
            // NOTE(review): the forget-callback is registered only when the object is
            // currently NOT sensable — confirm this condition isn't inverted.
            if (null != sensedObject.sensableObject && !sensedObject.sensableObject.IsSensable)
            {
                sensedObject.sensableObject.onNoLongerSensableListener += () => { Forget(sensedObject); };
            }
            rememberedObjects[collider.gameObject] = sensedObject;
        }

        float actualDistance = Vector3.Distance(collider.transform.position, transform.position);
        float sensedDistance = actualDistance;

        // Detection from least accurate to most.
        if (actualDistance < scentRange)
        {
            smelledObjects.Add(collider.gameObject);
            // Smell only gives a fuzzy position: jitter within scentAccuracyRadius.
            if (sensedObject.actualPosition != collider.transform.position)
            {
                sensedObject.position = collider.transform.position + UnityEngine.Random.insideUnitSphere * scentAccuracyRadius;
            }
            sensedDistance = Vector3.Distance(collider.transform.position, transform.position);
            sensedObject.smelled = true;
        }
        if (CanSee(collider.gameObject))
        {
            // Sight gives the exact position.
            seenObjects.Add(collider.gameObject);
            sensedObject.position = collider.transform.position;
            sensedObject.seen = true;
        }
        if (actualDistance < implicitDetectionRange)
        {
            // Anything close enough is detected regardless of sight/smell.
            implicitDetectedObjects.Add(collider.gameObject);
            sensedObject.position = collider.transform.position;
            sensedObject.implicitlyDetected = true;
        }
        sensedObject.actualPosition = collider.transform.position;
        sensedObject.distance = sensedDistance;

        // Track the nearest sensed object and notify listeners on change.
        if (sensedObject.distance < nearest)
        {
            var o = NearestSensedObject;
            NearestSensedObject = sensedObject;
            nearest = sensedObject.Distance;
            if (o != sensedObject)
            {
                nearestSensedChanged?.Invoke(o, sensedObject);
            }
        }
        // Anything not refreshed during the previous pass counts as newly sensed.
        if (lastSenseTime != sensedObject.lastDetection)
        {
            newlySensedObjects.Add(sensedObject);
        }
        sensedObject.lastDetection = detectionTime;
        SensedObjects[sensedObject] = sensedObject;
        // Cap the sensed collection at nearbySenseCapacity entries.
        while (SensedObjects.Count > nearbySenseCapacity)
        {
            SensedObjects.RemoveAt(nearbySenseCapacity);
        }
    }
    if (newlySensedObjects.Count > 0)
    {
        OnSensedNewObjects(newlySensedObjects);
    }
    lastSenseTime = detectionTime;

    // Drop the nearest object once it hasn't been detected for timeToRetainNearest.
    SensedObject old = NearestSensedObject;
    if (null != NearestSensedObject && Time.fixedTime - NearestSensedObject.lastDetection > timeToRetainNearest)
    {
        NearestSensedObject = null;
        nearest = float.PositiveInfinity;
        nearestSensedChanged?.Invoke(old, null);
    }
}
/// <summary>
/// Create a specific error message for the unresolved dependency.
/// </summary>
/// <returns>A RestoreLogMessage error whose NuGet log code (NU11xx) and text depend on
/// whether the unresolved reference is a project, a package with no/mismatched versions,
/// or entirely unknown.</returns>
public static async Task<RestoreLogMessage> GetMessageAsync(IRestoreTargetGraph graph, LibraryRange unresolved, RemoteWalkContext context, ILogger logger, CancellationToken token)
{
    // Default to using the generic unresolved error code, this will be overridden later.
    var code = NuGetLogCode.NU1100;
    var message = string.Empty;

    if (unresolved.TypeConstraintAllows(LibraryDependencyTarget.ExternalProject) && !unresolved.TypeConstraintAllows(LibraryDependencyTarget.Package))
    {
        // Project
        // Check if the name is a path and if it exists. All project paths should have been normalized and converted to full paths before this.
        if (unresolved.Name.IndexOf(Path.DirectorySeparatorChar) > -1 && File.Exists(unresolved.Name))
        {
            // File exists but the dg spec did not contain the spec
            code = NuGetLogCode.NU1105;
            message = string.Format(CultureInfo.CurrentCulture, Strings.Error_UnableToFindProjectInfo, unresolved.Name);
        }
        else
        {
            // Generic missing project error
            code = NuGetLogCode.NU1104;
            message = string.Format(CultureInfo.CurrentCulture, Strings.Error_ProjectDoesNotExist, unresolved.Name);
        }
    }
    else if (unresolved.TypeConstraintAllows(LibraryDependencyTarget.Package) && context.RemoteLibraryProviders.Count > 0)
    {
        // Package: ask every remote source which versions it has for this id.
        var range = unresolved.VersionRange ?? VersionRange.All;
        var sourceInfo = await GetSourceInfosForIdAsync(unresolved.Name, range, context, logger, token);
        var allVersions = new SortedSet<NuGetVersion>(sourceInfo.SelectMany(e => e.Value));
        if (allVersions.Count == 0)
        {
            // No versions found
            code = NuGetLogCode.NU1101;
            var sourceList = string.Join(", ", sourceInfo.Select(e => e.Key.Name)
                .OrderBy(e => e, StringComparer.OrdinalIgnoreCase));
            message = string.Format(CultureInfo.CurrentCulture, Strings.Error_NoPackageVersionsExist, unresolved.Name, sourceList);
        }
        else
        {
            // At least one version found, but none satisfied the requested range.
            var firstLine = string.Empty;
            var rangeString = range.ToNonSnapshotRange().PrettyPrint();
            // Distinguish "only prerelease versions would match" (NU1103) from a plain
            // range mismatch (NU1102).
            if (!IsPrereleaseAllowed(range) && HasPrereleaseVersionsOnly(range, allVersions))
            {
                code = NuGetLogCode.NU1103;
                firstLine = string.Format(CultureInfo.CurrentCulture, Strings.Error_NoStablePackageVersionsExist, unresolved.Name, rangeString);
            }
            else
            {
                code = NuGetLogCode.NU1102;
                firstLine = string.Format(CultureInfo.CurrentCulture, Strings.Error_NoPackageVersionsExistInRange, unresolved.Name, rangeString);
            }
            // One extra line per source describing the versions it offered.
            var lines = new List<string>() { firstLine };
            lines.AddRange(sourceInfo.Select(e => FormatSourceInfo(e, range)));
            message = DiagnosticUtility.GetMultiLineMessage(lines);
        }
    }
    else
    {
        // Unknown or non-specific.
        // Also shown when no sources exist.
        message = string.Format(CultureInfo.CurrentCulture, Strings.Log_UnresolvedDependency, unresolved.ToString(), graph.TargetGraphName);
        // Set again for clarity
        code = NuGetLogCode.NU1100;
    }
    return (RestoreLogMessage.CreateError(code, message, unresolved.Name, graph.TargetGraphName));
}
// Sinks a dataset into a SQL table transactionally: deletes the existing rows matching
// the dataset's key properties, then re-inserts every row of the dataset.
public override void Process(RFDataSetSinkSQLDomain domain)
{
    try
    {
        // Key columns come from the non-generic properties of the dataset object itself;
        // data columns from the row type's non-generic properties that are not already keys.
        var keys = new List<PropertyInfo>();
        var data = new List<PropertyInfo>();
        foreach (var propertyInfo in domain.DataSet.GetType().GetProperties())
        {
            if (!propertyInfo.PropertyType.IsGenericType)
            {
                keys.Add(propertyInfo);
            }
        }
        foreach (var propertyInfo in domain.DataSet.GetRowType().GetProperties())
        {
            if (!propertyInfo.PropertyType.IsGenericType && !keys.Any(p => p.Name == propertyInfo.Name))
            {
                data.Add(propertyInfo);
            }
        }
        int insertedRows = 0;
        int deletedRows = 0;
        using (var connection = new SqlConnection(_config.DBConnection))
        {
            connection.Open();
            using (var transaction = connection.BeginTransaction())
            {
                try
                {
                    // Discover the table's real (non-identity) columns and their sizes
                    // via a schema-only read.
                    var knownColumns = new Dictionary<string, int>();
                    var selectSQLBuilder = String.Format("SELECT * FROM [{0}].[{1}]", _config.SchemaName, _config.TableName);
                    using (var selectCommand = new SqlCommand(selectSQLBuilder, connection, transaction))
                    {
                        using (var reader = selectCommand.ExecuteReader(System.Data.CommandBehavior.SchemaOnly))
                        {
                            var schema = reader.GetSchemaTable();
                            foreach (DataRow row in schema.Rows)
                            {
                                if (!(bool)row["IsIdentity"])
                                {
                                    knownColumns.Add(row["ColumnName"].ToString(), (int)row["ColumnSize"]);
                                }
                            }
                        }
                    }
                    // remove keys on unknown columns (UpdateTime etc.)
                    keys.RemoveAll(k => !knownColumns.ContainsKey(k.Name));
                    // NOTE(review): if 'keys' ends up empty this builds "DELETE ... WHERE "
                    // with no predicate — confirm that cannot happen for valid configs.
                    var deleteSQLBuilder = new StringBuilder(String.Format("DELETE FROM [{0}].[{1}] WHERE ", _config.SchemaName, _config.TableName));
                    deleteSQLBuilder.Append(String.Join(" AND ", keys.Select(k => String.Format("[{0}] = @{0}", k.Name))));
                    var insertSQLBuilder = new StringBuilder(String.Format("INSERT INTO [{0}].[{1}] (", _config.SchemaName, _config.TableName));
                    insertSQLBuilder.Append(String.Join(", ", knownColumns.Select(k => String.Format("[{0}]", k.Key))));
                    insertSQLBuilder.Append(") VALUES (");
                    insertSQLBuilder.Append(String.Join(", ", knownColumns.Select(k => String.Format("@{0}", k.Key))));
                    insertSQLBuilder.Append(")");
                    // Delete the rows belonging to this dataset's key values (parameterized).
                    using (var deleteCommand = new SqlCommand(deleteSQLBuilder.ToString(), connection, transaction))
                    {
                        deleteCommand.CommandTimeout = 120;
                        foreach (var key in keys)
                        {
                            deleteCommand.Parameters.AddWithValue(String.Format("@{0}", key.Name), ConvertValue(key.GetValue(domain.DataSet)));
                        }
                        deletedRows = deleteCommand.ExecuteNonQuery();
                    }
                    // Insert every row, supplying DBNull for known columns the row doesn't provide.
                    foreach (var row in GenerateRows(domain.DataSet))
                    {
                        using (var insertCommand = new SqlCommand(insertSQLBuilder.ToString(), connection, transaction))
                        {
                            insertCommand.CommandTimeout = 120;
                            var suppliedColumns = new SortedSet<string>();
                            foreach (var key in keys)
                            {
                                insertCommand.Parameters.AddWithValue(String.Format("@{0}", key.Name), ConvertValue(key.GetValue(domain.DataSet)));
                                suppliedColumns.Add(key.Name);
                            }
                            foreach (var col in row)
                            {
                                if (knownColumns.ContainsKey(col.Key))
                                {
                                    insertCommand.Parameters.AddWithValue(String.Format("@{0}", col.Key), ConvertValue(col.Value, knownColumns[col.Key]));
                                    suppliedColumns.Add(col.Key);
                                }
                            }
                            foreach (var nullColumn in knownColumns.Keys.Except(suppliedColumns))
                            {
                                insertCommand.Parameters.AddWithValue(String.Format("@{0}", nullColumn), DBNull.Value);
                            }
                            insertedRows += insertCommand.ExecuteNonQuery();
                        }
                    }
                    transaction.Commit();
                }
                catch (Exception ex)
                {
                    // Disposing the transaction without Commit rolls it back.
                    // NOTE(review): this wrapped exception is re-wrapped by the outer catch
                    // below, producing a doubly nested RFSystemException — probably unintended.
                    throw new RFSystemException(this, ex, "Error sinking SQL dataset into table {0}",
                        _config.TableName);
                }
            }
        }
        Log.Info("SQL sink deleted {0} rows, added {1} rows into Table {2}", deletedRows, insertedRows, _config.TableName);
    }
    catch (Exception ex)
    {
        throw new RFSystemException(this, ex, "Error sinking SQL dataset into table {0}", _config.TableName);
    }
}
// Parsing needs to comply with these rules: https://github.com/ginatrapani/todo.txt-touch/wiki/Todo.txt-File-Format
//TODO priority regex need to only recognice upper case single chars
// Parses a single todo.txt line into its completed flag, priority, dates, projects,
// contexts, and remaining body text by stripping each matched part in turn.
public Task(string raw)
{
    raw = raw.Replace(Environment.NewLine, ""); //make sure it's just on one line

    // Relative due/threshold dates are rewritten to absolute dates before anything else.
    raw = ParseDate(raw, DueRelativePattern);
    raw = ParseDate(raw, ThresholdRelativePattern);

    //Set Raw string after replacing relative date but before removing matches
    Raw = raw;

    // because we are removing matches as we go, the order we process is important. It must be:
    // - completed
    // - priority
    // - due date
    // - created date
    // - projects | contexts
    // What we have left is the body
    var reg = new Regex(CompletedPattern, RegexOptions.IgnoreCase);
    var s = reg.Match(raw).Value.Trim();

    if (string.IsNullOrEmpty(s))
    {
        Completed = false;
        CompletedDate = "";
    }
    else
    {
        Completed = true;
        // Skip the first two characters of the match to get the completion date —
        // presumably "x <date>" (marker plus space); verify against CompletedPattern.
        if (s.Length > 1)
        {
            CompletedDate = s.Substring(2);
        }
    }
    raw = reg.Replace(raw, "");

    reg = new Regex(PriorityPattern, RegexOptions.IgnoreCase);
    Priority = reg.Match(raw).Groups["priority"].Value.Trim();
    raw = reg.Replace(raw, "");

    reg = new Regex(DueDatePattern);
    DueDate = reg.Match(raw).Groups["date"].Value.Trim();
    raw = reg.Replace(raw, "");

    reg = new Regex(ThresholdDatePattern);
    var match = reg.Match(raw);
    var @group = match.Groups["date"];
    var value = @group.Value;
    ThresholdDate = value.Trim();
    raw = reg.Replace(raw, "");

    reg = new Regex(CreatedDatePattern);
    CreationDate = reg.Match(raw).Groups["date"].Value.Trim();
    raw = reg.Replace(raw, "");

    // Collect +projects; the first one encountered becomes the primary project.
    var ProjectSet = new SortedSet<string>();
    reg = new Regex(ProjectPattern);
    var projects = reg.Matches(raw);
    PrimaryProject = null;
    int i = 0;
    foreach (Match project in projects)
    {
        var p = project.Groups["proj"].Value.Trim();
        ProjectSet.Add(p);
        if (i == 0)
        {
            PrimaryProject = p;
        }
        i++;
    }
    Projects = ProjectSet.ToList<string>();
    raw = reg.Replace(raw, "");

    // Collect @contexts; the first one encountered becomes the primary context.
    var ContextsSet = new SortedSet<string>();
    reg = new Regex(ContextPattern);
    var contexts = reg.Matches(raw);
    PrimaryContext = null;
    i = 0;
    foreach (Match context in contexts)
    {
        var c = context.Groups["context"].Value.Trim();
        ContextsSet.Add(c);
        if (i == 0)
        {
            PrimaryContext = c;
        }
        i++;
    }
    Contexts = ContextsSet.ToList<string>();
    raw = reg.Replace(raw, "");

    // Whatever text remains is the task body.
    Body = raw.Trim();
}
private void analyse_voronoi()
{
    // Determine which domain each grid node belongs to (graph algorithm based on an
    // adjacency-level structure): multi-source BFS from the seed points xf.
    SortedSet<int>[] adjncy = new SortedSet<int>[xf.Length];
    for (int i = 0; i < xf.Length; i++)
    {
        adjncy[i] = new SortedSet<int>();
    }
    Console.WriteLine("Разбиение пространства на домены");
    Queue<int> queue = new Queue<int>();
    domain = new int[grid.Node.Length];
    double[] dist = new double[grid.Node.Length];
    // -1 marks "not yet assigned to any domain".
    for (int i = 0; i < domain.Length; i++)
    {
        domain[i] = -1;
        dist[i] = double.PositiveInfinity;
    }
    // Seed the BFS with the grid node closest to each point xf[i].
    for (int i = 0; i < xf.Length; i++)
    {
        int index;
        grid.ToIndex(xf[i], out index);
        dist[index] = distanceX(grid.Node[index], xf[i]);
        domain[index] = i;
        //setvalue(index, xf[i]);
        queue.Enqueue(index);
    }
    // Flood fill: expand each domain outward; where two domains meet, record their
    // adjacency and keep the closer seed.
    while (queue.Count > 0)
    {
        int index = queue.Dequeue();
        int i = domain[index];
        foreach (var adj in grid.Neighbours(index))
        {
            double d = distanceX(grid.Node[adj], xf[i]);
            if (domain[adj] >= 0)
            {
                adjncy[domain[adj]].Add(i);
                adjncy[i].Add(domain[adj]);
                if (d < dist[adj])
                {
                    domain[adj] = i;
                    dist[adj] = d;
                }
                continue;
            }
            domain[adj] = i;
            dist[adj] = d;
            //setvalue(adj, xf[i]);
            queue.Enqueue(adj);
        }
    }
    Console.WriteLine("Построение графа доменов");
    // Build the domain-adjacency graph (each domain is adjacent to itself as well).
    graph = new int[xf.Length][];
    for (int i = 0; i < xf.Length; i++)
    {
        adjncy[i].Add(i);
        graph[i] = adjncy[i].ToArray();
    }
    Console.WriteLine("Построение диграммы Вороного на сетке");
    // Refine the domains (Voronoi diagram on the grid): reassign each node to the
    // nearest seed among its current domain's neighbours.
    for (int i = 0; i < grid.Node.Length; i++)
    {
        double[] xy = grid.Node[i];
        int[] adj = graph[domain[i]];
        double min = double.PositiveInfinity;
        for (int j = 0; j < adj.Length; j++)
        {
            double d = distanceX(xy, xf[adj[j]]);
            if (min > d)
            {
                min = d;
                domain[i] = adj[j];
            }
        }
    }
    Console.WriteLine("Вычисление границ доменов");
    // Compute the domain boundaries: a node with a neighbour in another domain is a
    // border node (distance 0 to the border, its own index as nearest border node).
    borderdist = new double[grid.Node.Length];
    bordernear = new int[grid.Node.Length];
    for (int i = 0; i < grid.Node.Length; i++)
    {
        borderdist[i] = double.PositiveInfinity;
        bordernear[i] = -1;
        int dom = domain[i];
        foreach (var adj in grid.Neighbours(i))
        {
            if (domain[adj] != dom)
            {
                borderdist[i] = 0;
                bordernear[i] = i;
                queue.Enqueue(i);
                break;
            }
        }
    }
    candidates = queue.ToArray();
    //--------------------------TO REMOVE AFTER PROPER CLASSIFIER USAGE-------------------------
    //--------------------------------------Too silly example-----------------------------------
    // Classification is binary.. Shall we use more simplest binary classifier?
    Classifiers.LabeledData[] ldata = new Classifiers.LabeledData[3];
    ldata[0] = new Classifiers.LabeledData(new double[3] { grid.Node[0][0], borderdist[0], bordernear[0] }, 1);
    ldata[1] = new Classifiers.LabeledData(new double[3] { grid.Node[1][0], borderdist[1], bordernear[1] }, 1);
    ldata[2] = new Classifiers.LabeledData(new double[3] { grid.Node[2][0], borderdist[2], bordernear[2] }, 0);
    Classifiers.IClassifier cls = new Classifiers.RandomForest();
    Classifiers.RandomForestParams ps = new Classifiers.RandomForestParams(ldata,
        3 /* samples count */,
        3 /* features count */,
        2 /* classes count */,
        3 /* trees count */,
        2 /* count of features to do split in a tree */,
        0.7 /* percent of a training set of samples used to build individual trees. */);
    cls.train(ps);
    int[] y = new int[3];
    cls.infer(ldata[0].data, out y[0]);
    cls.infer(ldata[1].data, out y[1]);
    cls.infer(ldata[2].data, out y[2]);
    for (int i = 0; i < 3; i++)
    {
        Console.WriteLine("{0} is predicted y[{1}] from trained data sample and {2} is ground truth", y[i], i, ldata[i].label);
    }
    double trainModelPrecision;
    cls.validate(ldata, out trainModelPrecision);
    Console.WriteLine("Model precision on training dataset: " + trainModelPrecision);
    //------------------------------------------------------------------------------------------
    //--------------------------TO REMOVE AFTER PROPER CLASSIFIER USAGE-------------------------
    Console.WriteLine("Построение функции расстояний до границ доменов");
    // Compute distances from the domain borders: BFS within each domain starting at its
    // border nodes, propagating the nearest border node and its distance.
    while (queue.Count > 0)
    {
        int index = queue.Dequeue();
        int dom = domain[index];
        int brd = bordernear[index];
        foreach (var adj in grid.Neighbours(index))
        {
            if (domain[adj] != dom)
            {
                continue;
            }
            double d = distanceX(grid.Node[adj], grid.Node[brd]);
            if (bordernear[adj] >= 0)
            {
                if (d < borderdist[adj])
                {
                    bordernear[adj] = brd;
                    borderdist[adj] = d;
                }
                continue;
            }
            bordernear[adj] = brd;
            borderdist[adj] = d;
            queue.Enqueue(adj);
        }
    }
    Console.WriteLine("Нормировка функции расстояний до границ доменов");
    // Normalize the border distances to [0, 1]: 0 on the border, 1 at the seed point.
    for (int i = 0; i < grid.Node.Length; i++)
    {
        int dom = domain[i];
        int brd = bordernear[i];
        double a = distanceX(grid.Node[i], xf[dom]);
        double b = distanceX(grid.Node[i], grid.Node[brd]);
        double c = a + b;
        borderdist[i] = (c == 0) ? 0 : b / c;
    }
}
// Populates every member with a small deterministic sample value so the object can be
// round-tripped through serialization and verified field-by-field.
public void Initialize()
{
    // Scalar primitives.
    MyInt16 = 1;
    MyInt32 = 2;
    MyInt64 = 3;
    MyUInt16 = 4;
    MyUInt32 = 5;
    MyUInt64 = 6;
    MyByte = 7;
    MySByte = 8;
    MyChar = 'a';
    MyString = "Hello";
    MyBooleanTrue = true;
    MyBooleanFalse = false;
    MySingle = 1.1f;
    MyDouble = 2.2d;
    MyDecimal = 3.3m;
    MyDateTime = new DateTime(2019, 1, 30, 12, 1, 2, DateTimeKind.Utc);
    MyDateTimeOffset = new DateTimeOffset(2019, 1, 30, 12, 1, 2, new TimeSpan(1, 0, 0));
    MyEnum = SampleEnum.Two;

    // Single-element arrays of each primitive.
    MyInt16Array = new short[] { 1 };
    MyInt32Array = new int[] { 2 };
    MyInt64Array = new long[] { 3 };
    MyUInt16Array = new ushort[] { 4 };
    MyUInt32Array = new uint[] { 5 };
    MyUInt64Array = new ulong[] { 6 };
    MyByteArray = new byte[] { 7 };
    MySByteArray = new sbyte[] { 8 };
    MyCharArray = new char[] { 'a' };
    MyStringArray = new string[] { "Hello" };
    MyBooleanTrueArray = new bool[] { true };
    MyBooleanFalseArray = new bool[] { false };
    MySingleArray = new float[] { 1.1f };
    MyDoubleArray = new double[] { 2.2d };
    MyDecimalArray = new decimal[] { 3.3m };
    MyDateTimeArray = new DateTime[] { new DateTime(2019, 1, 30, 12, 1, 2, DateTimeKind.Utc) };
    MyDateTimeOffsetArray = new DateTimeOffset[] { new DateTimeOffset(2019, 1, 30, 12, 1, 2, new TimeSpan(1, 0, 0)) };
    MyEnumArray = new SampleEnum[] { SampleEnum.Two };

    // Jagged arrays and nested lists (two and three levels deep).
    MyInt16TwoDimensionArray = new int[2][];
    MyInt16TwoDimensionArray[0] = new int[] { 10, 11 };
    MyInt16TwoDimensionArray[1] = new int[] { 20, 21 };
    MyInt16TwoDimensionList = new List<List<int>>();
    MyInt16TwoDimensionList.Add(new List<int> { 10, 11 });
    MyInt16TwoDimensionList.Add(new List<int> { 20, 21 });
    MyInt16ThreeDimensionArray = new int[2][][];
    MyInt16ThreeDimensionArray[0] = new int[2][];
    MyInt16ThreeDimensionArray[1] = new int[2][];
    MyInt16ThreeDimensionArray[0][0] = new int[] { 11, 12 };
    MyInt16ThreeDimensionArray[0][1] = new int[] { 13, 14 };
    MyInt16ThreeDimensionArray[1][0] = new int[] { 21, 22 };
    MyInt16ThreeDimensionArray[1][1] = new int[] { 23, 24 };
    MyInt16ThreeDimensionList = new List<List<List<int>>>();
    var list1 = new List<List<int>>();
    MyInt16ThreeDimensionList.Add(list1);
    list1.Add(new List<int> { 11, 12 });
    list1.Add(new List<int> { 13, 14 });
    var list2 = new List<List<int>>();
    MyInt16ThreeDimensionList.Add(list2);
    list2.Add(new List<int> { 21, 22 });
    list2.Add(new List<int> { 23, 24 });

    // Collection-interface properties backed by simple arrays/lists.
    MyStringList = new List<string>() { "Hello" };
    MyStringIEnumerableT = new string[] { "Hello" };
    MyStringIListT = new string[] { "Hello" };
    MyStringICollectionT = new string[] { "Hello" };
    MyStringIReadOnlyCollectionT = new string[] { "Hello" };
    MyStringIReadOnlyListT = new string[] { "Hello" };

    // Dictionary-shaped properties.
    MyStringToStringDict = new Dictionary<string, string> { { "key", "value" } };
    MyStringToStringIDict = new Dictionary<string, string> { { "key", "value" } };
    MyStringToStringIReadOnlyDict = new Dictionary<string, string> { { "key", "value" } };

    // Concrete generic collections.
    MyStringStackT = new Stack<string>(new List<string>() { "Hello", "World" });
    MyStringQueueT = new Queue<string>(new List<string>() { "Hello", "World" });
    MyStringHashSetT = new HashSet<string>(new List<string>() { "Hello" });
    MyStringLinkedListT = new LinkedList<string>(new List<string>() { "Hello" });
    MyStringSortedSetT = new SortedSet<string>(new List<string>() { "Hello" });

    // Immutable collections (interface- and concrete-typed).
    MyStringIImmutableListT = ImmutableList.CreateRange(new List<string> { "Hello" });
    MyStringIImmutableStackT = ImmutableStack.CreateRange(new List<string> { "Hello" });
    MyStringIImmutableQueueT = ImmutableQueue.CreateRange(new List<string> { "Hello" });
    MyStringIImmutableSetT = ImmutableHashSet.CreateRange(new List<string> { "Hello" });
    MyStringImmutableHashSetT = ImmutableHashSet.CreateRange(new List<string> { "Hello" });
    MyStringImmutableListT = ImmutableList.CreateRange(new List<string> { "Hello" });
    MyStringImmutableStackT = ImmutableStack.CreateRange(new List<string> { "Hello" });
    MyStringImmutablQueueT = ImmutableQueue.CreateRange(new List<string> { "Hello" });
    MyStringImmutableSortedSetT = ImmutableSortedSet.CreateRange(new List<string> { "Hello" });

    // A list containing a null element, for null-handling coverage.
    MyListOfNullString = new List<string> { null };
}
// Reads a count followed by cnt values and searches for two DIFFERENT subsets with the
// same sum using an incrementally-built reachable-sums dictionary; prints both subsets
// (with their common elements removed) or "Impossible".
static void Solve()
{
    string s = Console.ReadLine();
    string[] sa = s.Split(' ');
    int cnt = int.Parse(sa[0]);
    long[] V = new Int64[cnt]; // long === Int64
    for (int i = 1; i <= cnt; ++i)
    {
        V[i - 1] = long.Parse(sa[i]);
    }
    Array.Sort(V);
    // M: sum --> index of the element of V last used to reach that sum
    Dictionary<long, int> M = new Dictionary<long, int>();
    M[0] = -1; // empty subset reaches 0; -1 also terminates the chain walks below
    for (int i = 0; i < cnt; ++i)
    {
        long w = V[i];
        // Iterate over a snapshot so M can be extended while enumerating.
        Dictionary<long, int> M2 = new Dictionary<long, int>(M);
        foreach (KeyValuePair<long, int> d in M2)
        {
            if (d.Value == i)
            {
                continue;
            }
            long w2 = d.Key + w;
            if (!M2.ContainsKey(w2))
            {
                M[w2] = i;
            }
            else
            {
                // match! w2 can be reached either from w2-V[M[w2]] or from w2-V[i]
                int j;
                long k;
                SortedSet<int> s1 = new SortedSet<int>(), s2 = new SortedSet<int>();
                // s1: walk the previously stored chain for w2 back to the empty subset
                k = w2;
                j = M[w2];
                while (j >= 0)
                {
                    s1.Add(j);
                    k -= V[j];
                    j = M[k];
                }
                // s2: the alternative chain, which ends with element i
                k = w2;
                j = i;
                while (j >= 0)
                {
                    s2.Add(j);
                    k -= V[j];
                    j = M[k];
                }
                // diff: strip the elements common to both subsets
                SortedSet<int> sb = new SortedSet<int>(s1);
                sb.IntersectWith(s2);
                s1.ExceptWith(sb);
                s2.ExceptWith(sb);
                // out
                foreach (int a in s1)
                {
                    Console.Write("{0} ", V[a]);
                }
                Console.WriteLine();
                foreach (int a in s2)
                {
                    Console.Write("{0} ", V[a]);
                }
                Console.WriteLine();
                return;
            }
        }
    }
    Console.WriteLine("Impossible");
}
// Builds the full completion set for the current context: code-folding directives,
// mnemonics allowed by the enabled architectures, and keywords from the XML store
// filtered by token type, assembler in use, and architecture switches.
private SortedSet<Completion> Selected_Completions(bool useCapitals, ISet<AsmTokenType> selectedTypes)
{
    SortedSet<Completion> completions = new SortedSet<Completion>(new CompletionComparer());

    //Add the completions of AsmDude directives (such as code folding directives)
    #region
    if (Settings.Default.CodeFolding_On)
    {
        {
            string insertionText = Settings.Default.CodeFolding_BeginTag;     //the characters that start the outlining region
            string description = insertionText + " - keyword to start code folding";
            completions.Add(new Completion(description, insertionText, null, this._icons[AsmTokenType.Directive], ""));
        }
        {
            string insertionText = Settings.Default.CodeFolding_EndTag;       //the characters that end the outlining region
            string description = insertionText + " - keyword to end code folding";
            completions.Add(new Completion(description, insertionText, null, this._icons[AsmTokenType.Directive], ""));
        }
    }
    #endregion
    AssemblerEnum usedAssember = AsmDudeToolsStatic.Used_Assembler;

    #region
    // Mnemonics: only those allowed by the architectures currently switched on.
    if (selectedTypes.Contains(AsmTokenType.Mnemonic))
    {
        ISet<Arch> selectedArchs = AsmDudeToolsStatic.Get_Arch_Swithed_On();
        IList<Mnemonic> allowedMnemonics = Get_Allowed_Mnemonics(selectedArchs);
        //AsmDudeToolsStatic.Output("INFO: CodeCompletionSource:selectedCompletions; allowedMnemonics.Count=" + allowedMnemonics.Count + "; selectedArchs="+ArchTools.ToString(selectedArchs));
        foreach (Mnemonic mnemonic in allowedMnemonics)
        {
            string keyword = mnemonic.ToString();
            string insertionText = (useCapitals) ? keyword : keyword.ToLower();
            string archStr = ArchTools.ToString(this._asmDudeTools.Mnemonic_Store.GetArch(mnemonic));
            string descriptionStr = this._asmDudeTools.Mnemonic_Store.GetDescription(mnemonic);
            descriptionStr = (descriptionStr.Length == 0) ? "" : " - " + descriptionStr;
            String displayText = keyword + archStr + descriptionStr;
            //String description = keyword.PadRight(15) + archStr.PadLeft(8) + descriptionStr;
            this._icons.TryGetValue(AsmTokenType.Mnemonic, out var imageSource);
            completions.Add(new Completion(displayText, insertionText, null, imageSource, ""));
        }
    }

    //Add the completions that are defined in the xml file
    foreach (string keyword in this._asmDudeTools.Get_Keywords())
    {
        AsmTokenType type = this._asmDudeTools.Get_Token_Type(keyword);
        if (selectedTypes.Contains(type))
        {
            Arch arch = Arch.NONE;
            bool selected = true;
            if (type == AsmTokenType.Directive)
            {
                // Directives are filtered by the assembler (MASM/NASM) currently in use.
                AssemblerEnum assembler = this._asmDudeTools.Get_Assembler(keyword);
                if (assembler.HasFlag(AssemblerEnum.MASM))
                {
                    if (!usedAssember.HasFlag(AssemblerEnum.MASM))
                    {
                        selected = false;
                    }
                }
                else if (assembler.HasFlag(AssemblerEnum.NASM))
                {
                    if (!usedAssember.HasFlag(AssemblerEnum.NASM))
                    {
                        selected = false;
                    }
                }
            }
            else
            {
                // Non-directives are filtered by the architecture switches instead.
                arch = this._asmDudeTools.Get_Architecture(keyword);
                selected = AsmDudeToolsStatic.Is_Arch_Switched_On(arch);
            }
            AsmDudeToolsStatic.Output_INFO("CodeCompletionSource:Selected_Completions; keyword=" + keyword + "; arch=" + arch + "; selected=" + selected);
            if (selected)
            {
                //Debug.WriteLine("INFO: CompletionSource:AugmentCompletionSession: name keyword \"" + entry.Key + "\"");
                // by default, the entry.Key is with capitals
                string insertionText = (useCapitals) ? keyword : keyword.ToLower();
                string archStr = (arch == Arch.NONE) ? "" : " [" + ArchTools.ToString(arch) + "]";
                string descriptionStr = this._asmDudeTools.Get_Description(keyword);
                descriptionStr = (descriptionStr.Length == 0) ? "" : " - " + descriptionStr;
                String displayText = keyword + archStr + descriptionStr;
                //String description = keyword.PadRight(15) + archStr.PadLeft(8) + descriptionStr;
                this._icons.TryGetValue(type, out var imageSource);
                completions.Add(new Completion(displayText, insertionText, null, imageSource, ""));
            }
        }
    }
    #endregion
    return (completions);
}
/// <summary>
/// Builds a library whose catalogue keeps the given books in sorted order.
/// </summary>
/// <param name="books">Initial books to place in the library.</param>
public Library(params Book[] books)
{
    this.books = new SortedSet<Book>(collection: books);
}
/// <summary>
/// Builds the completion list for a mnemonic's operand position: only registers and
/// misc keywords whose architecture is switched on AND which the mnemonic's signature
/// allows are offered.
/// </summary>
private SortedSet<Completion> Mnemonic_Operand_Completions(bool useCapitals, ISet<AsmSignatureEnum> allowedOperands)
{
    var result = new SortedSet<Completion>(new CompletionComparer());

    foreach (string keyword in this._asmDudeTools.Get_Keywords())
    {
        Arch arch = this._asmDudeTools.Get_Architecture(keyword);
        AsmTokenType type = this._asmDudeTools.Get_Token_Type(keyword);

        // First gate: the keyword's architecture must be enabled in the settings.
        bool include = AsmDudeToolsStatic.Is_Arch_Switched_On(arch);
        if (include)
        {
            // Second gate: the keyword must fit one of the operand shapes the
            // signature allows; anything that is neither a register nor misc is out.
            if (type == AsmTokenType.Register)
            {
                Rn regName = RegisterTools.ParseRn(keyword);
                include = AsmSignatureTools.Is_Allowed_Reg(regName, allowedOperands);
            }
            else if (type == AsmTokenType.Misc)
            {
                include = AsmSignatureTools.Is_Allowed_Misc(keyword, allowedOperands);
            }
            else
            {
                include = false;
            }
        }

        if (include)
        {
            // by default the keyword is stored with capitals
            string insertionText = useCapitals ? keyword : keyword.ToLower();
            string archStr = (arch == Arch.NONE) ? "" : " [" + ArchTools.ToString(arch) + "]";
            string descriptionStr = this._asmDudeTools.Get_Description(keyword);
            if (descriptionStr.Length != 0)
            {
                descriptionStr = " - " + descriptionStr;
            }
            String displayText = keyword + archStr + descriptionStr;
            this._icons.TryGetValue(type, out var imageSource);
            result.Add(new Completion(displayText, insertionText, null, imageSource, ""));
        }
    }
    return result;
}
List<double> m_gamma; //step sizes (gamma) taken at each LARS iteration

/// <summary>
/// First Lars model, (includes intercept).
/// Computes the LARS path: at each step the variable most correlated with the current
/// residual joins the active set and all active coefficients move along the
/// equiangular direction by the smallest step at which a new variable catches up.
/// </summary>
/// <param name="x">Observation matrix: n rows (observations) by p columns (variables).</param>
/// <param name="y">Response vector of length n.</param>
public Lars(double[,] x, double[] y)
{
    bool lasso = false; // placeholder: LASSO modification not implemented yet
    int n = x.GetLength(0); //number of observations
    int p = x.GetLength(1); //number of variables
    int maxvars = System.Math.Min(n - 1, p); //maximum number of variables
    int maxit = 8 * maxvars;

    m_beta = new List<double[]>(); // coefficient vector recorded at each step
    m_gamma = new List<double>();

    SortedSet<int> c_set = new SortedSet<int>(); //candidate set
    SortedSet<int> a_set = new SortedSet<int>(); //active set

    //initialize candidate set with all available variables
    for (int i = 0; i < p; i++)
    {
        c_set.Add(i);
    }

    //compute gramm matrix (gram = x' * x)
    Linalg.Matrix full_gram = new Linalg.Matrix(p, p);
    for (int i = 0; i < p; i++)
    {
        for (int j = 0; j < p; j++)
        {
            double sum = 0.0;
            for (int k = 0; k < n; k++)
            {
                sum += x[k, i] * x[k, j];
            }
            full_gram[i, j] = sum;
        }
    }

    double[] mu = new double[n]; //lars regression vector
    double[] c = new double[p];  //correlations

    for (int it = 0; it < maxit; it++)
    {
        //compute correlations with the current residual
        for (int j = 0; j < p; j++)
        {
            double sum = 0.0;
            for (int i = 0; i < n; i++)
            {
                sum += x[i, j] * (y[i] - mu[i]);
            }
            c[j] = sum;
        }

        //find abs max corr from candidate set
        double max_abs_c = 0.0;
        int max_abs_c_index = -1;
        foreach (int i in c_set)
        {
            double abs_c = System.Math.Abs(c[i]);
            if (abs_c > max_abs_c)
            {
                max_abs_c = abs_c;
                max_abs_c_index = i;
            }
        }

        //exit if there is no correlation
        if (max_abs_c < ACQ.Math.Const.epsilon)
        {
            break;
        }

        a_set.Add(max_abs_c_index);
        c_set.Remove(max_abs_c_index);

        int vars = a_set.Count;
        int[] active_indices = a_set.ToArray();

        // BUG FIX: the sign vector has length `vars`, so it must be indexed by POSITION
        // within the active set. The previous code wrote s[i] with i being the original
        // variable index from a_set, which overruns the array whenever an active
        // variable's index >= vars (e.g. on the first step if variable 3 enters with
        // vars == 1). The positional layout also matches how s is consumed below
        // (w[i] = s[i] * inv_gram[i, 0] iterates i in 0..vars-1).
        double[] s = new double[vars];
        for (int j = 0; j < vars; j++)
        {
            s[j] = ACQ.Math.Utils.Sign(c[active_indices[j]]);
        }

        //compute partial Gram matrix, Gram = X(active_columns)' * X(active_columns)
        Linalg.Matrix gram = full_gram.Submatrix(active_indices, active_indices);
        Linalg.CholeskyDecomposition gram_chol = new Linalg.CholeskyDecomposition(gram);
        Linalg.Matrix inv_gram = gram_chol.Solve(s); // inv_gram = Gram^-1 * s

        //compute coefficients of equiangular vector; norm accumulates s' * Gram^-1 * s
        double[] w = new double[vars];
        double norm = 0.0;
        for (int i = 0; i < vars; i++)
        {
            w[i] = s[i] * inv_gram[i, 0];
            norm += w[i];
        }
        double scale = 1.0 / System.Math.Sqrt(norm);
        for (int i = 0; i < vars; i++)
        {
            w[i] = scale * w[i];
        }

        //compute equiangular vector u = X(active_columns) * w
        double[] u = new double[n];
        for (int i = 0; i < n; i++)
        {
            double sum = 0.0;
            for (int j = 0; j < vars; j++)
            {
                sum += x[i, active_indices[j]] * w[j];
            }
            u[i] = sum;
        }

        double gamma = max_abs_c / scale; // set gamma to the largest value (i.e. use regular least squares)

        //correlation (angle) between equiangular vector and all remaining variables;
        //the step is the smallest positive value at which a candidate catches up
        foreach (int i in c_set)
        {
            double angle = 0.0;
            for (int j = 0; j < n; j++)
            {
                angle += x[j, i] * u[j];
            }
            double t1 = (max_abs_c - c[i]) / (scale - angle);
            double t2 = (max_abs_c + c[i]) / (scale + angle);
            if (t1 > 0)
            {
                gamma = System.Math.Min(t1, gamma);
            }
            if (t2 > 0)
            {
                gamma = System.Math.Min(t2, gamma);
            }
        }

        //LASSO code here
        if (lasso)
        {
        }

        //update coefficients: carry over the previous step's beta for active variables
        double[] beta = new double[p];
        if (m_beta.Count > 0)
        {
            double[] pev_beta = m_beta[m_beta.Count - 1];
            for (int i = 0; i < vars; i++)
            {
                int index = active_indices[i];
                beta[index] = pev_beta[index];
            }
        }
        for (int i = 0; i < vars; i++)
        {
            beta[active_indices[i]] += gamma * w[i];
        }
        m_beta.Add(beta);
        m_gamma.Add(gamma);

        //update lars vector
        for (int i = 0; i < n; i++)
        {
            mu[i] += gamma * u[i];
        }
    }
}
/// <summary>
/// Counts the elements of <paramref name="x"/>, treating a null set as empty.
/// </summary>
/// <param name="x">set to measure; may be null</param>
/// <returns>element count, or 0 when <paramref name="x"/> is null</returns>
public static int Size<T>(SortedSet<T> x)
{
    Contracts.AssertValueOrNull(x);
    if (x == null)
    {
        return 0;
    }
    return x.Count;
}
/// <summary>
/// Supplies asm code-completion items for the session's trigger point.
/// Flow: bail out when disposed or completion is switched off; skip when the caret
/// is inside a remark; locate the partially typed keyword; then choose a completion
/// list depending on whether the current line already contains a mnemonic.
/// Appends at most one CompletionSet to <paramref name="completionSets"/>.
/// </summary>
/// <param name="session">active completion session providing the trigger point</param>
/// <param name="completionSets">output collection the new CompletionSet is added to</param>
public void AugmentCompletionSession(ICompletionSession session, IList <CompletionSet> completionSets) {
    //AsmDudeToolsStatic.Output(string.Format("INFO: {0}:AugmentCompletionSession", this.ToString()));
    if (this._disposed) {
        return;
    }
    if (!Settings.Default.CodeCompletion_On) {
        return;
    }
    try {
        DateTime time1 = DateTime.Now; // start time for the speed warning emitted at the end
        ITextSnapshot snapshot = this._buffer.CurrentSnapshot;
        SnapshotPoint triggerPoint = (SnapshotPoint)session.GetTriggerPoint(snapshot);
        if (triggerPoint == null) {
            return;
        }
        ITextSnapshotLine line = triggerPoint.GetContainingLine();

        //1] check if current position is in a remark; if we are in a remark, no code completion
        #region
        if (triggerPoint.Position > 1) {
            char currentTypedChar = (triggerPoint - 1).GetChar();
            //AsmDudeToolsStatic.Output("INFO: CodeCompletionSource:AugmentCompletionSession: current char = "+ currentTypedChar);
            // a '#' just typed is allowed even inside a remark (e.g. preprocessor-style input)
            if (!currentTypedChar.Equals('#')) { //TODO UGLY since the user can configure this starting character
                int pos = triggerPoint.Position - line.Start;
                if (AsmSourceTools.IsInRemark(pos, line.GetText())) {
                    //AsmDudeToolsStatic.Output("INFO: CodeCompletionSource:AugmentCompletionSession: currently in a remark section");
                    return;
                } else {
                    // AsmDudeToolsStatic.Output("INFO: CodeCompletionSource:AugmentCompletionSession: not in a remark section");
                }
            }
        }
        #endregion

        //2] find the start of the current keyword
        #region
        // walk backwards from the trigger point until a separator character (or line start)
        SnapshotPoint start = triggerPoint;
        while ((start > line.Start) && !AsmTools.AsmSourceTools.IsSeparatorChar((start - 1).GetChar())) {
            start -= 1;
        }
        #endregion

        //3] get the word that is currently being typed
        #region
        ITrackingSpan applicableTo = snapshot.CreateTrackingSpan(new SnapshotSpan(start, triggerPoint), SpanTrackingMode.EdgeInclusive);
        string partialKeyword = applicableTo.GetText(snapshot);
        // match the casing of what the user has typed so far
        bool useCapitals = AsmDudeToolsStatic.Is_All_Upper(partialKeyword);

        SortedSet <Completion> completions = null;

        string lineStr = line.GetText();
        var t = AsmSourceTools.ParseLine(lineStr);
        Mnemonic mnemonic = t.Item2;

        //AsmDudeToolsStatic.Output_INFO("CodeCompletionSource:AugmentCompletionSession; lineStr="+ lineStr+ "; t.Item1="+t.Item1);
        string previousKeyword = AsmDudeToolsStatic.Get_Previous_Keyword(line.Start, start).ToUpper();

        if (mnemonic == Mnemonic.UNKNOWN) {
            //AsmDudeToolsStatic.Output_INFO("CodeCompletionSource:AugmentCompletionSession; lineStr=" + lineStr + "; previousKeyword=" + previousKeyword);
            if (previousKeyword.Equals("INVOKE")) //TODO INVOKE is a MASM keyword not a NASM one...
            {
                // Suggest a label
                completions = Label_Completions();
            } else {
                // no mnemonic on the line yet: offer directives, jumps, misc keywords and mnemonics
                ISet <AsmTokenType> selected = new HashSet <AsmTokenType> {
                    AsmTokenType.Directive, AsmTokenType.Jump, AsmTokenType.Misc, AsmTokenType.Mnemonic
                };
                completions = Selected_Completions(useCapitals, selected);
            }
        } else { // the current line contains a mnemonic
            //AsmDudeToolsStatic.Output("INFO: CodeCompletionSource:AugmentCompletionSession; mnemonic=" + mnemonic+ "; previousKeyword="+ previousKeyword);
            if (AsmSourceTools.IsJump(AsmSourceTools.ParseMnemonic(previousKeyword))) {
                //AsmDudeToolsStatic.Output("INFO: CodeCompletionSource:AugmentCompletionSession; previous keyword is a jump mnemonic");
                // previous keyword is jump (or call) mnemonic. Suggest "SHORT" or a label
                completions = Label_Completions();
                completions.Add(new Completion("SHORT", (useCapitals) ? "SHORT" : "short", null, this._icons[AsmTokenType.Misc], ""));
                completions.Add(new Completion("NEAR", (useCapitals) ? "NEAR" : "near", null, this._icons[AsmTokenType.Misc], ""));
            } else if (previousKeyword.Equals("SHORT") || previousKeyword.Equals("NEAR")) {
                // Suggest a label
                completions = Label_Completions();
            } else {
                // mnemonic with (partial) operands: suggest operands that fit the
                // mnemonic's signatures at the current operand position (comma count)
                IList <Operand> operands = AsmSourceTools.MakeOperands(t.Item3);
                ISet <AsmSignatureEnum> allowed = new HashSet <AsmSignatureEnum>();
                int commaCount = AsmSignature.Count_Commas(lineStr);
                IList <AsmSignatureElement> allSignatures = this._asmDudeTools.Mnemonic_Store.GetSignatures(mnemonic);
                ISet <Arch> selectedArchitectures = AsmDudeToolsStatic.Get_Arch_Swithed_On();
                foreach (AsmSignatureElement se in AsmSignatureHelpSource.Constrain_Signatures(allSignatures, operands, selectedArchitectures)) {
                    if (commaCount < se.Operands.Count) {
                        foreach (AsmSignatureEnum s in se.Operands[commaCount]) {
                            allowed.Add(s);
                        }
                    }
                }
                completions = Mnemonic_Operand_Completions(useCapitals, allowed);
            }
        }
        //AsmDudeToolsStatic.Output("INFO: CodeCompletionSource:AugmentCompletionSession; nCompletions=" + completions.Count);
        #endregion

        completionSets.Add(new CompletionSet("Tokens", "Tokens", applicableTo, completions, Enumerable.Empty <Completion>()));

        AsmDudeToolsStatic.Print_Speed_Warning(time1, "Code Completion");
    } catch (Exception e) {
        // never let an exception escape into the editor; log it instead
        AsmDudeToolsStatic.Output_ERROR(string.Format("{0}:AugmentCompletionSession; e={1}", ToString(), e.ToString()));
    }
}