// Adding two distinct values to a fresh set must yield a count of two.
public void SecondAdd()
{
    var set = new Set<int>();

    set.Add(1);
    set.Add(2);

    Assert.AreEqual(2, set.Count);
}
/*************************/
// Selecting initial vertices
/*************************/
// Returns a vertex on the last level of a Breadth-first search (BFS)
public static int BFS(Graph graph, int init)
{
    var pending = new Queue<int>(); // BFS working queue
    var seen = new Set<int>();      // vertices already discovered

    // Seed the search with the given initial vertex.
    seen.Add(init);
    pending.Enqueue(init);
    int last = init;

    // Process vertices level by level; 'last' always holds the most
    // recently dequeued vertex.
    while (pending.Count > 0)
    {
        last = pending.Dequeue();
        foreach (int neighbor in graph.OpenNeighborhood(last))
        {
            // Skip vertices already scheduled or processed.
            if (seen.Contains(neighbor))
                continue;
            seen.Add(neighbor);
            pending.Enqueue(neighbor);
        }
    }

    // The final dequeued vertex lies on the deepest BFS level.
    return last;
}
// Small console demo exercising the string Set: construction, Intersect,
// Union, Remove (present and absent keys), Find and Count.
public static void Main()
{
    var first = new Set<string>();
    var second = new Set<string>();

    foreach (var word in new[] { "one", "two", "three" })
        first.Add(word);
    second.Add("three");
    second.Add("four");

    Console.WriteLine(first);
    Console.WriteLine(second);

    // Intersection and union are shown in both directions.
    Console.WriteLine(first.Intersect(second));
    Console.WriteLine(second.Intersect(first));
    Console.WriteLine(first.Union(second));
    Console.WriteLine(second.Union(first));

    // Removing an absent item is a no-op; removing "two" shrinks the set.
    first.Remove("five");
    first.Remove("two");
    Console.WriteLine(first);

    Console.WriteLine(first.Find("one"));
    Console.WriteLine(first.Find("two"));
    Console.WriteLine(first.Count);
}
// Two distinct items added through the ISet interface are both counted.
public void AddPositiveTest()
{
    ISet<string> set = new Set<string>();

    set.Add("item1");
    set.Add("item2");

    Assert.AreEqual(2, set.Count);
}
// Generates all valid strings of 'remaining' balanced parenthesis pairs.
Set<string> GenerateParens2(int remaining)
{
    var results = new Set<string>();

    // Base case: zero pairs — only the empty string is valid.
    if (remaining == 0)
    {
        results.Add("");
        return results;
    }

    // Build on the solutions for one fewer pair: insert a new pair right
    // after every '(' of each smaller solution, and also prepend "()".
    foreach (string candidate in GenerateParens2(remaining - 1))
    {
        for (int pos = 0; pos < candidate.Length; pos++)
        {
            if (candidate[pos] == '(')
            {
                /* Set automatically checks for duplicates before adding,
                 * so no explicit membership check is necessary. */
                results.Add(InsertInside(candidate, pos));
            }
        }
        results.Add("()" + candidate);
    }

    return results;
}
// Console demo: duplicate insertions collapse to one element, and
// Union/Intersect operate as expected on two small sets.
static void Main()
{
    var setOne = new Set<int>();

    // Nine duplicate insertions of 5 — the set should keep a single 5.
    for (int i = 0; i < 9; i++)
    {
        setOne.Add(5);
    }
    setOne.Add(6);

    var setTwo = new Set<int>();
    setTwo.Add(5);
    setTwo.Add(10);

    Console.WriteLine(setOne);
    Console.WriteLine(setTwo);

    var union = setOne.Union(setTwo);
    Console.WriteLine();
    Console.WriteLine(union);

    var intersect = setOne.Intersect(setTwo);
    Console.WriteLine(intersect);
}
/// <summary>
/// Converts a full-text search result into a set of SPARQL variable bindings.
/// </summary>
/// <param name="result">Search result supplying the matched node and its score.</param>
/// <param name="matchVar">Variable to bind the matched node to, or null to skip that binding.</param>
/// <param name="scoreVar">Variable to bind the score to, or null to skip that binding.</param>
internal static ISet ToSet(this IFullTextSearchResult result, String matchVar, String scoreVar)
{
    Set s = new Set();
    // Only bind the variables the caller actually requested
    if (matchVar != null) s.Add(matchVar, result.Node);
    // The score is turned into an RDF literal via the shared node factory
    if (scoreVar != null) s.Add(scoreVar, result.Score.ToLiteral(_factory));
    return s;
}
/// <summary>
/// Evaluates the Select Distinct Graphs optimisation
/// </summary>
/// <param name="context">Evaluation Context</param>
/// <returns>The output multiset containing one solution per graph.</returns>
public BaseMultiset Evaluate(SparqlEvaluationContext context)
{
    context.OutputMultiset = new Multiset();

    // Prefer the query's declared result variable; fall back to the stored one.
    String graphVar = context.Query != null
        ? context.Query.Variables.First(v => v.IsResultVariable).Name
        : this._graphVar;

    // Produce one solution per graph, binding the variable to the graph URI
    // (or to null for the default graph).
    foreach (Uri graphUri in context.Data.GraphUris)
    {
        Set s = new Set();
        s.Add(graphVar, graphUri == null ? null : new UriNode(null, graphUri));
        context.OutputMultiset.Add(s);
    }

    return context.OutputMultiset;
}
// An item added via the comparison-delegate overload is still counted as
// distinct when no existing item satisfies the comparison.
public void AddWithDelegatePositiveTest()
{
    ISet<string> set = new Set<string>();

    set.Add("item1");
    set.Add("item22", (left, right) => left.Length == right.Length);

    Assert.AreEqual(2, set.Count);
}
// Console walkthrough of the untyped Set: Union, Intersection, Difference
// and subset checks in both directions.
static void Main(string[] args)
{
    var a = new Set();
    a.Add(1);
    a.Add(2);
    a.Add(3);
    Console.WriteLine("a : " + a);

    var b = new Set();
    b.Add(2);
    b.Add(3);
    Console.WriteLine("b : " + b);

    Set c = a.Union(b);
    Console.WriteLine("a.Union(b) : " + c);
    c = a.Intersection(b);
    Console.WriteLine("a.Intersection(b) : " + c);
    c = a.Difference(b);
    Console.WriteLine("a.Difference(b) : " + c);

    // Subset relation is not symmetric — show both directions.
    Console.WriteLine("a.IsSubsetOf(b) : " + a.IsSubsetOf(b));
    Console.WriteLine("b.IsSubsetOf(a) : " + b.IsSubsetOf(a));

    // Keep the console window open.
    Console.Read();
}
// Two distinct additions must produce a count of exactly two.
public void TestIfAddAddsACorrectAmount()
{
    var numbers = new Set<int>();

    numbers.Add(5);
    numbers.Add(6);

    Assert.AreEqual(2, numbers.Count);
}
// Clearing a populated set must leave it empty.
public void ShouldClear()
{
    var set = new Set<string>();
    set.Add("1");
    set.Add("2");

    set.Clear();

    Assert.AreEqual(0, set.Count);
}
// Removing one element must not disturb the remaining elements.
public void RemoveTest()
{
    var numbers = new Set<int>();
    numbers.Add(1);
    numbers.Add(2);

    numbers.Remove(1);

    Assert.IsFalse(numbers.Contains(1));
    Assert.IsTrue(numbers.Contains(2));
}
// Del removes exactly the requested element and leaves the other intact.
public void TestDel()
{
    var numbers = new Set<int>();
    numbers.Add(1);
    numbers.Add(2);

    numbers.Del(2);

    Assert.IsTrue(numbers.IsExist(1));
    Assert.IsTrue(!numbers.IsExist(2));
}
// The default comparer is case-sensitive, so "A" and "a" are distinct items.
public void DefaultSet()
{
    var set = new Set<string>();

    set.Add("A");
    set.Add("B");
    set.Add("a");

    Assert.AreEqual<int>(3, set.Count, "The number of items in the set are not correct.");
}
// With a case-insensitive comparer, "A" and "a" collapse to a single item.
public void CaseInsensitiveSet()
{
    var set = new Set<string>(StringComparer.InvariantCultureIgnoreCase);

    set.Add("A");
    set.Add("B");
    set.Add("a");

    Assert.AreEqual<int>(2, set.Count, "The number of items in the set are not correct.");
}
// Adding an array to a non-empty set appends every array element.
public void AddArrayToNonEmptySet()
{
    var set = new Set<int>();
    set.Add(11);
    set.Add(22);

    set.Add(new[] { 33, 44, 55 });

    // BUG FIX: the original called Assert.Equals(5, set.Count), which is the
    // inherited object.Equals — it returns a bool and asserts NOTHING, so a
    // wrong count would silently pass. xUnit's Assert.Equal actually fails.
    Assert.Equal(5, set.Count);
    Assert.True(set.Contains(11));
    Assert.True(set.Contains(44));
}
// Added items are counted and enumerate in insertion order.
public void ShouldAdd()
{
    var set = new Set<string>();

    set.Add("1");
    set.Add("2");

    Assert.AreEqual(2, set.Count);
    Assert.AreEqual("1", set.First());
    Assert.AreEqual("2", set.Last());
}
// Two stories built from items with the same title/link but different
// descriptions are both reported as contained after being added.
public void ContainsStoryTwoAddsHaveDifferentTimes()
{
    var stories = new Set<Story>();
    var first = new Story(new FrontpageItem { Title = "title", Link = "blah", Description = "shouldnt matter" });
    var second = new Story(new FrontpageItem { Title = "title", Link = "blah", Description = "diff but irrelevant" });

    stories.Add(first);
    stories.Add(second);

    Assert.True(stories.Contains(first));
    Assert.True(stories.Contains(second));
}
// Remove deletes a present item; removing null is a harmless no-op.
public void ShouldRemove()
{
    var set = new Set<string>();
    set.Add("1");
    set.Add("2");

    set.Remove("1");
    set.Remove(null);

    Assert.AreEqual(1, set.Count);
    Assert.AreEqual("2", set.First());
}
// UnionOfSets merges the second set's elements into the first in place.
public void TestUnion()
{
    var left = new Set<int>();
    left.Add(1);
    left.Add(2);
    var right = new Set<int>();
    right.Add(3);

    left.UnionOfSets(right);

    Assert.IsTrue(left.IsExist(1) && left.IsExist(2) && left.IsExist(3));
}
// Contains finds stored items; null is never contained because null
// additions are ignored by the set.
public void ShouldContains()
{
    var set = new Set<string>();
    set.Add("1");
    set.Add("2");
    set.Add(null); // Ignored

    Assert.IsTrue(set.Contains("1"));
    Assert.IsFalse(set.Contains("Hello"));
    Assert.IsFalse(set.Contains(null));
}
// Null additions are silently dropped; only real values are stored.
public void ShouldNotAddNulls()
{
    var set = new Set<string>();

    set.Add(null);
    set.Add("1");
    set.Add(null);
    set.Add(null);

    Assert.AreEqual(1, set.Count);
    Assert.AreEqual("1", set.First());
}
// Find returns a stored value, and Contains accurately reflects membership.
public void TestIfFindFindsTheValue()
{
    var numbers = new Set<int>();
    numbers.Add(5);
    numbers.Add(6);

    Assert.AreEqual(true, numbers.Contains(5));
    Assert.AreEqual(5, numbers.Find(5));
    Assert.AreEqual(false, numbers.Contains(7));
}
// Pick must return some element of the set, i.e. a value in [1, 3].
public void TestPick()
{
    var numbers = new Set<int> ();
    numbers.Add (1);
    numbers.Add (2);
    numbers.Add (3);

    var picked = numbers.Pick ();

    Assert.True (picked >= 1 && picked <= 3);
}
// Duplicate additions collapse: the resulting set equals {1, 2}.
public void Add_SimpleValues_AddedSuccessful()
{
    var actual = new Set<int>();
    var expected = new Set<int>(new[] { 1, 2 });

    actual.Add(1);
    actual.Add(2);
    actual.Add(2);

    Assert.Equal(expected, actual);
}
// Repeatedly adding the same value stores it exactly once.
public void ShouldNotAddDuplicateValues()
{
    var set = new Set<int>();

    for (int i = 0; i < 5; i++)
    {
        set.Add(1);
    }

    Assert.AreEqual(1, set.Count);
}
// Five distinct values all end up in the set.
public void ShouldCorrectlyAddAllValues()
{
    var set = new Set<int>();

    foreach (var value in new[] { 1, 3, 2, 7, 8 })
    {
        set.Add(value);
    }

    Assert.AreEqual(5, set.Count);
}
// Removing an absent item is a no-op; removing a present item shrinks the set.
public void Remove()
{
    var names = new Set<string>();
    names.Add("olof");
    names.Add("bjarnason");

    names.Remove("djdi");
    Assert.AreEqual(2, names.Count);

    names.Remove("olof");
    Assert.AreEqual(1, names.Count);
    Assert.IsTrue(names.Contains("bjarnason"));
}
// A structural change while a live enumerator exists must invalidate it.
public void ShouldThrowIfSameThreadModifiesCollectionWhileEnumerating()
{
    var set = new Set<string>();
    set.Add("1");
    set.Add("2");

    var enumerator = set.GetEnumerator();
    enumerator.MoveNext();

    // Mutate the set mid-enumeration...
    set.Add("3");

    // ...so the next advance must fail fast.
    Assert.Throws<InvalidOperationException>(() => enumerator.MoveNext());
    enumerator.Dispose();
}
/// <summary>
/// Adds the given entity to the underlying set and returns the tracked instance.
/// </summary>
public TEntity Add(TEntity entity) => Set.Add(entity).Entity;
/// <summary>
/// Gets the shortest path from the current synset to another, following the given synset relations.
/// Performs a breadth-first search from this synset, recording backpointers so the
/// path can be reconstructed once the destination is dequeued.
/// </summary>
/// <param name="destination">Destination synset</param>
/// <param name="relations">Relations to follow, or null for all relations.</param>
/// <returns>Synset path, or null if none exists.</returns>
public List <SynSet> GetShortestPathTo(SynSet destination, IEnumerable <WordNetEngine.SynSetRelation> relations)
{
    // null means "follow every relation type"
    if (relations == null)
    {
        relations = Enum.GetValues(typeof(WordNetEngine.SynSetRelation)) as WordNetEngine.SynSetRelation[];
    }

    // make sure the backpointer on the current synset is null - can't predict what other functions might do
    _searchBackPointer = null;

    // avoid cycles
    Set <SynSet> synsetsEncountered = new Set <SynSet>();
    synsetsEncountered.Add(this);

    // start search queue
    Queue <SynSet> searchQueue = new Queue <SynSet>();
    searchQueue.Enqueue(this);

    // run search (BFS, so the first time we reach the destination the path is shortest)
    List <SynSet> path = null;
    while (searchQueue.Count > 0 && path == null)
    {
        SynSet currSynSet = searchQueue.Dequeue();

        // see if we've finished the search
        if (currSynSet == destination)
        {
            // gather synsets along path by walking the backpointer chain
            path = new List <SynSet>();
            while (currSynSet != null)
            {
                path.Add(currSynSet);
                currSynSet = currSynSet.SearchBackPointer;
            }

            // reverse for the correct order (source first)
            path.Reverse();
        }
        // expand the search one level
        else
        {
            foreach (SynSet synset in currSynSet.GetRelatedSynSets(relations, false))
            {
                if (!synsetsEncountered.Contains(synset))
                {
                    // record how we reached this synset so the path can be rebuilt
                    synset.SearchBackPointer = currSynSet;
                    searchQueue.Enqueue(synset);
                    synsetsEncountered.Add(synset);
                }
            }
        }
    }

    // null-out all search backpointers so later searches start clean
    foreach (SynSet synset in synsetsEncountered)
    {
        synset.SearchBackPointer = null;
    }

    return(path);
}
/// <summary>
/// Adds the specified entity to the current <see cref="EntityFrameworkRepository{TEntity, TContext}" />.
/// </summary>
/// <param name="entity">
/// The entity to add.
/// </param>
/// <param name="controlToken">
/// A token that ensures thread safety for the operation.
/// </param>
protected override void Add(TEntity entity, ConcurrencyControlToken controlToken)
{
    // Straight delegation to the underlying set; the control token is the
    // caller's guarantee of exclusive access and needs no further handling here.
    Set.Add(entity);
}
/// <summary>
/// For every analysis node in <paramref name="ns"/>, walks the points-to graph
/// backwards to find local-variable roots and records, per root, the chain of
/// fields leading to the heap location as a <see cref="VariableEffect"/>.
/// </summary>
/// <param name="ns">Heap locations (points-to nodes) to trace back from.</param>
/// <param name="lb">Label attached to each produced effect.</param>
/// <returns>The set of variable/field-path effects found.</returns>
public Set <VariableEffect> ComputePathsFromVariableToHeapLocation(Set <IPTAnalysisNode> ns, Label lb)
{
    Set <VariableEffect> variableEffects = new Set <VariableEffect>();
    //if (WriteEffects.HasWriteEffects)
    //    Console.Out.WriteLine("Modifies:");
    // Traverse every write effect
    foreach (IPTAnalysisNode n in ns)
    {
        // Get the fields that are backward reachable from the modified field
        Set <List <Edge> > paths = PointsToGraph.DFSPathFrom(n, false, true, true);
        foreach (List <Edge> currentPath in paths)
        {
            // Paths come back ending at the root; reverse so the root is first
            currentPath.Reverse();
            IPTAnalysisNode rootNode;
            if (currentPath.Count > 0)
            {
                rootNode = currentPath[0].Src;
            }
            else
            {
                // Empty path: the node itself is the root
                rootNode = n;
            }
            Variable v = null;
            if (rootNode.IsVariableReference)
            {
                IVarRefNode vrNode = rootNode as IVarRefNode;
                v = vrNode.ReferencedVariable;
                // Only effects rooted at locals are of interest here
                if (!IsLocal(v))
                {
                    continue;
                }
                //if (!(v is Parameter) && !v.Equals(PTGraph.GlobalScope))
                //    continue;
            }
            // The global node maps to the pseudo-variable for global scope
            if (rootNode.Equals(GNode.nGBL))
            {
                v = PTGraph.GlobalScope;
            }
            /*
             * if (rootNode.IsParameterNode && ((PNode)rootNode).IsByValue)
             * {
             *     bool fieldUpdate = !n.Field.Equals(PTGraph.asterisk);
             *     foreach (Edge e in currentPath)
             *         fieldUpdate = fieldUpdate || !e.Field.Equals(PTGraph.asterisk);
             *     if (!fieldUpdate)
             *         continue;
             * }
             */
            string nodeName = rootNode.Name;
            if (v != null)
            {
                // Build the effect: the variable plus the field chain along the path,
                // skipping the placeholder '*' fields
                VariableEffect vEffect = new VariableEffect(v, lb);
                foreach (Edge e in currentPath)
                {
                    if (!e.Field.Equals(PTGraph.asterisk))
                    {
                        vEffect.AddField(e.Field); // lastField = e.Field;
                    }
                }
                variableEffects.Add(vEffect);
            }
        }
    }
    return(variableEffects);
}
/// <summary>
/// Evaluates a LET assignment in the given Evaluation Context.
/// </summary>
/// <param name="context">Evaluation Context.</param>
public override void Evaluate(SparqlEvaluationContext context)
{
    if (context.InputMultiset is NullMultiset)
    {
        // Nothing to assign over — propagate the null multiset unchanged
        context.OutputMultiset = context.InputMultiset;
    }
    else if (context.InputMultiset is IdentityMultiset)
    {
        // Single empty solution: evaluate the expression once and bind it
        Set s = new Set();
        try
        {
            INode temp = _expr.Evaluate(context, 0);
            s.Add(_var, temp);
            context.OutputMultiset.Add(s);
        }
        catch
        {
            // No assignment if there's an error
        }
    }
    else
    {
        // ToList() snapshots the IDs because solutions may be removed below
        foreach (int id in context.InputMultiset.SetIDs.ToList())
        {
            ISet s = context.InputMultiset[id];
            if (s.ContainsVariable(_var))
            {
                try
                {
                    // A value already exists so see if the two values match
                    INode current = s[_var];
                    INode temp = _expr.Evaluate(context, id);
                    // NOTE(review): '!=' here relies on INode's operator/reference
                    // equality rather than Equals() — confirm this is intended
                    if (current != temp)
                    {
                        // Where the values aren't equal the solution is eliminated
                        context.InputMultiset.Remove(id);
                    }
                }
                catch
                {
                    // If an error occurs the solution is eliminated
                    context.InputMultiset.Remove(id);
                }
            }
            else
            {
                context.InputMultiset.AddVariable(_var);
                try
                {
                    // Make a new assignment
                    INode temp = _expr.Evaluate(context, id);
                    s.Add(_var, temp);
                }
                catch
                {
                    // If an error occurs no assignment happens
                }
            }
        }
        // Mutations above were applied to the input in place, so the output
        // is the identity multiset (joining it leaves the input as-is)
        context.OutputMultiset = new IdentityMultiset();
    }
}
/// <summary>
/// Override method to handle launching of a chooser for selecting lexical entries.
/// Shows the morph-type chooser, then applies the chosen type inside a single
/// undo/redo task, switching between stem and affix representations when needed.
/// </summary>
protected override void HandleChooser()
{
    // Writing-system magic string and optional post-dialog message come from
    // the <deParams> element of the configuration node, when present.
    string displayWs = "analysis vernacular";
    string postDialogMessageTrigger = null;
    if (m_configurationNode != null)
    {
        XmlNode node = m_configurationNode.SelectSingleNode("deParams");
        if (node != null)
        {
            displayWs = XmlUtils.GetAttributeValue(node, "ws", "analysis vernacular").ToLower();
            postDialogMessageTrigger = XmlUtils.GetAttributeValue(node, "postChangeMessageTrigger", null);
        }
    }
    Set <int> candidates = m_obj.ReferenceTargetCandidates(m_flid);
    ObjectLabelCollection labels = new ObjectLabelCollection(m_cache, candidates, m_displayNameProperty, displayWs);
    using (MorphTypeChooser chooser = GetChooser(labels))
    {
        bool fMadeMorphTypeChange = false;
        ILexEntry entry = LexEntry.CreateFromDBObject(m_cache, m_obj.OwnerHVO);
        chooser.InitializeExtras(m_configurationNode, Mediator);
        chooser.SetObjectAndFlid(m_obj.Hvo, m_flid);
        // Preselect the current morph type in the dialog
        int hvoType = m_cache.GetObjProperty(m_obj.Hvo, m_flid);
        chooser.MakeSelection(hvoType);
        // LT-4433 changed the Alternate Forms to choose between Stem and Affix automatically
        // when inserting. Thus, we need the check box in that environment as well.
        //if (m_obj.OwningFlid != (int)LexEntry.LexEntryTags.kflidLexemeForm)
        //    chooser.ShowAllTypesCheckBoxVisible = false;
        if (chooser.ShowDialog() == DialogResult.OK)
        {
            ObjectLabel selected = chooser.ChosenOne;
            int hvoOriginal = TargetHvo;
            string sUndo = m_mediator.StringTbl.GetStringWithXPath("ChangeLexemeMorphTypeUndo", m_ksPath);
            string sRedo = m_mediator.StringTbl.GetStringWithXPath("ChangeLexemeMorphTypeRedo", m_ksPath);
            bool fRemoveComponents = false;
            if (selected.Hvo == entry.Cache.GetIdFromGuid(new Guid(MoMorphType.kguidMorphRoot))
                || selected.Hvo == entry.Cache.GetIdFromGuid(new Guid(MoMorphType.kguidMorphBoundRoot)))
            {
                // changing to root...not allowed to have complex forms.
                foreach (LexEntryRef ler in entry.EntryRefsOS)
                {
                    if (ler.RefType == LexEntryRef.krtComplexForm)
                    {
                        fRemoveComponents = true;
                        // If there are no components we will delete without asking...but must then check for more
                        // complex forms that DO have components.
                        if (ler.ComponentLexemesRS.Count > 0)
                        {
                            // Ask the user before destroying complex forms that carry components
                            if (MessageBox.Show(FindForm(), DetailControlsStrings.ksRootNoComponentsMessage,
                                DetailControlsStrings.ksRootNoComponentsCaption, MessageBoxButtons.YesNo,
                                MessageBoxIcon.Question, MessageBoxDefaultButton.Button1, 0,
                                FwApp.App.HelpFile, HelpNavigator.Topic, "khtRootCannotHaveComponents") != DialogResult.Yes)
                            {
                                return;
                            }
                            break;
                        }
                    }
                }
            }
            // All modifications below are grouped into one undoable task
            using (new UndoRedoTaskHelper(entry.Cache, sUndo, sRedo))
            {
                if (fRemoveComponents)
                {
                    // Delete every complex-form entry ref in one batch
                    Set <int> delObjs = new Set <int>();
                    foreach (LexEntryRef ler in entry.EntryRefsOS)
                    {
                        if (ler.RefType == LexEntryRef.krtComplexForm)
                        {
                            delObjs.Add(ler.Hvo);
                        }
                    }
                    CmObject.DeleteObjects(delObjs, m_cache);
                }
                if (IsStemType(hvoOriginal) || m_obj is MoStemAllomorph)
                {
                    if (IsStemType(selected.Hvo))
                    {
                        // stem -> stem: simple assignment
                        TargetHvo = selected.Hvo;
                    }
                    else
                    {
                        //have to switch from stem to affix
                        fMadeMorphTypeChange = ChangeStemToAffix(entry, selected.Hvo, sUndo, sRedo);
                    }
                }
                else
                {
                    // original is affix variety
                    if (IsStemType(selected.Hvo))
                    {
                        //have to switch from affix to stem
                        fMadeMorphTypeChange = ChangeAffixToStem(entry, selected.Hvo, sUndo, sRedo);
                    }
                    else
                    {
                        // affix -> affix: simple assignment
                        TargetHvo = selected.Hvo;
                    }
                }
                if (selected.Hvo == entry.Cache.GetIdFromGuid(new Guid(MoMorphType.kguidMorphPhrase)))
                {
                    // Phrases are represented as complex-form entry refs
                    ILexEntryRef ler = new LexEntryRef();
                    entry.EntryRefsOS.Append(ler);
                    ler.RefType = LexEntryRef.krtComplexForm;
                    ler.HideMinorEntry = 1;
                    // No automatic propchanged for new objects, need to let the view see it.
                    // At that point our slice will be disposed, so don't do anything after this.
                    entry.Cache.PropChanged(entry.Hvo, (int)LexEntry.LexEntryTags.kflidEntryRefs, 0, 1, 0);
                }
            }
        }
    }
}
/// <summary>
/// Deletes or removes the selected project folders. For each folder the user
/// chooses between permanent deletion from disk and mere removal from the
/// project; affected projects are saved at the end.
/// </summary>
public override void DeleteMultipleItems()
{
    var projects = new Set <SolutionEntityItem> ();
    var folders = new List <ProjectFolder> ();
    // Collect every folder node in the current selection
    foreach (ITreeNavigator node in CurrentNodes)
    {
        folders.Add((ProjectFolder)node.DataItem);
    }
    var removeButton = new AlertButton(GettextCatalog.GetString("_Remove from Project"), Gtk.Stock.Remove);
    var question = new QuestionMessage()
    {
        AllowApplyToAll = folders.Count > 1,
        SecondaryText = GettextCatalog.GetString(
            "The Delete option permanently removes the directory and any files it contains from your hard disk. " +
            "Click Remove from Project if you only want to remove it from your current solution.")
    };
    question.Buttons.Add(AlertButton.Cancel);
    question.Buttons.Add(AlertButton.Delete);
    question.Buttons.Add(removeButton);
    foreach (var folder in folders)
    {
        var project = folder.Project;
        var folderRelativePath = folder.Path.ToRelative(project.BaseDirectory);
        var files = project.Files.GetFilesInVirtualPath(folderRelativePath).ToList();
        var folderPf = project.Files.GetFileWithVirtualPath(folderRelativePath);
        //if the parent directory has already been removed, there may be nothing to do
        if (files.Count == 0 && folderPf == null)
        {
            continue;
        }
        question.Text = GettextCatalog.GetString("Are you sure you want to remove directory {0} from project {1}?",
            folder.Name, project.Name);
        var result = MessageService.AskQuestion(question);
        // Cancel (or closing the dialog) aborts the remaining folders too
        if (result != removeButton && result != AlertButton.Delete)
        {
            break;
        }
        // Remember the project so it gets saved once at the end
        projects.Add(project);
        //remove the files and link files in the directory
        foreach (var f in files)
        {
            project.Files.Remove(f);
        }
        // also remove the folder's own ProjectFile, if it exists
        // FIXME: it probably was already in the files list
        if (folderPf != null)
        {
            project.Files.Remove(folderPf);
        }
        if (result == AlertButton.Delete)
        {
            try
            {
                if (Directory.Exists(folder.Path))
                {
                    // FileService events should remove remaining files from the project
                    FileService.DeleteDirectory(folder.Path);
                }
            }
            catch (Exception ex)
            {
                // Disk deletion failed: report but continue with the other folders
                MessageService.ShowError(GettextCatalog.GetString(
                    "The folder {0} could not be deleted from disk: {1}", folder.Path, ex.Message));
            }
        }
        else
        {
            //explictly remove the node from the tree, since it currently only tracks real folder deletions
            folder.Remove();
        }
        // If it's the last item in the parent folder, make sure we keep a reference to the parent
        // folder, so it is not deleted from the tree.
        var inParentFolder = project.Files.GetFilesInVirtualPath(folderRelativePath.ParentDirectory);
        if (!inParentFolder.Skip(1).Any())
        {
            project.Files.Add(new ProjectFile(folder.Path.ParentDirectory)
            {
                Subtype = Subtype.Directory,
            });
        }
    }
    // Persist all projects that were actually modified
    IdeApp.ProjectOperations.Save(projects);
}
/// <summary>
/// Registers a primitive type under the given name and additionally records
/// that name as a primitive usable in literal form.
/// </summary>
/// <param name="name">Name the primitive is registered under.</param>
/// <param name="type">The primitive type being registered.</param>
protected void AddLiteralPrimitiveType(string name, IType type)
{
    AddPrimitiveType(name, type);
    // Track the name so literal handling can recognise this primitive
    _literalPrimitives.Add(name);
}
// Densifies the skeleton by inserting evenly spaced intermediate nodes on every
// edge, then rebuilds the adjacency lists to thread through the new nodes.
private void SkeletonDensify(double interval) // interval is used here to keep the node density uniform
{
    // One (initially empty) adjacency set per existing skeleton node
    Set <int>[] initadj = new Set <int> [skeRcd.numSkeNode];
    for (int i = 0; i < initadj.Count(); i++)
    {
        initadj[i] = new Set <int>();
    }
    List <Set <int> > updateAdjVV = new List <Set <int> >(initadj); // populate the list
    // DFS over the skeleton so each edge is expanded exactly once
    bool[] flag = new bool[skeRcd.nodePosList.Count];
    int start = 0;
    Stack <int> skeS = new Stack <int>();
    skeS.Push(start);
    while (skeS.Count != 0)
    {
        int cur = skeS.Pop();
        flag[cur] = true;
        int[] neighb = skeRcd.adjVV[cur].ToArray();
        for (int i = 0; i < neighb.Length; i++)
        {
            int adjv_num = neighb[i];
            if (flag[adjv_num])
            {
                continue;
            }
            else
            {
                skeS.Push(adjv_num);
                #region expand the skeleton fragment
                // Number of intermediate nodes this edge can hold at the given spacing
                double leng = (skeRcd.nodePosList[cur] - skeRcd.nodePosList[adjv_num]).Length();
                int numDensity = (int)(leng / interval);
                if (numDensity == 0)
                {
                    // Edge shorter than the interval: nothing to insert
                    continue;
                }
                else
                {
                    // Indices along the fragment: cur, new nodes..., adjv_num
                    List <int> fragSkeindexlist = new List <int>();
                    fragSkeindexlist.Add(cur);
                    int curSkeCount = skeRcd.nodePosList.Count; // initial index of current progress
                    for (int j = 1; j <= numDensity; j++) // starting from the second point
                    {
                        fragSkeindexlist.Add(curSkeCount);
                        // Linear interpolation between the two endpoint positions
                        skeRcd.nodePosList.Add((1 - j * interval / leng) * (skeRcd.nodePosList[cur])
                            + j * interval / leng * (skeRcd.nodePosList[adjv_num]));
                        updateAdjVV.Add(new Set <int>());
                        curSkeCount++;
                    }
                    fragSkeindexlist.Add(adjv_num);
                    // next to build the new updateadjvv: chain each fragment node
                    // to its predecessor and successor
                    updateAdjVV[cur].Add(fragSkeindexlist[1]);
                    for (int j = 1; j < fragSkeindexlist.Count - 1; j++)
                    {
                        Set <int> looptemp = new Set <int>(2);
                        looptemp.Add(fragSkeindexlist[j - 1]);
                        looptemp.Add(fragSkeindexlist[j + 1]);
                        updateAdjVV[fragSkeindexlist[j]] = looptemp;
                    }
                    updateAdjVV[adjv_num].Add(fragSkeindexlist[fragSkeindexlist.Count - 2]);
                }
                #endregion
            }
        }
    }
    // Swap in the densified adjacency structure
    skeRcd.adjVV = updateAdjVV;
    // debug
    bool s = Checkchest(updateAdjVV);
}
// Tries to emit switch as a jmp table. Returns false (leaving emission to the
// general path) when the switch shape cannot be optimized; returns true after
// emitting the jump-table form.
private bool TryEmitSwitchInstruction(SwitchExpression node, CompilationFlags flags)
{
    // If we have a comparison, bail
    if (node.Comparison != null)
    {
        return(false);
    }

    // Make sure the switch value type and the right side type
    // are types we can optimize
    var type = node.SwitchValue.Type;
    if (!CanOptimizeSwitchType(type) || type != node.Cases[0].TestValues[0].Type)
    {
        return(false);
    }

    // Make sure all test values are constant, or we can't emit the
    // jump table.
    if (!node.Cases.All(c => c.TestValues.All(t => t is ConstantExpression)))
    {
        return(false);
    }

    //
    // We can emit the optimized switch, let's do it.
    //

    // Build target labels, collect keys.
    var labels = new Label[node.Cases.Count];
    var isGoto = new bool[node.Cases.Count];
    var uniqueKeys = new Set <decimal>();
    var keys = new List <SwitchLabel>();
    for (var i = 0; i < node.Cases.Count; i++)
    {
        DefineSwitchCaseLabel(node.Cases[i], out labels[i], out isGoto[i]);
        foreach (ConstantExpression test in node.Cases[i].TestValues)
        {
            // Guaranteed to work thanks to CanOptimizeSwitchType.
            //
            // Use decimal because it can hold Int64 or UInt64 without
            // precision loss or signed/unsigned conversions.
            var key = ConvertSwitchValue(test.Value);

            // Only add each key once. If it appears twice, it's
            // allowed, but can't be reached.
            if (!uniqueKeys.Contains(key))
            {
                keys.Add(new SwitchLabel(key, test.Value, labels[i]));
                uniqueKeys.Add(key);
            }
        }
    }

    // Sort the keys, and group them into buckets.
    keys.Sort((x, y) => Math.Sign(x.Key - y.Key));
    var buckets = new List <List <SwitchLabel> >();
    foreach (var key in keys)
    {
        AddToBuckets(buckets, key);
    }

    // Emit the switchValue into a local so each bucket can reload it
    var value = GetLocal(node.SwitchValue.Type);
    EmitExpression(node.SwitchValue);
    _ilg.Emit(OpCodes.Stloc, value);

    // Create end label, and default label if needed
    var end = _ilg.DefineLabel();
    var @default = (node.DefaultBody == null) ? end : _ilg.DefineLabel();

    // Emit the switch
    var info = new SwitchInfo(node, value, @default);
    EmitSwitchBuckets(info, buckets, 0, buckets.Count - 1);

    // Emit the case bodies and default
    EmitSwitchCases(node, labels, isGoto, @default, end, flags);

    // Return the scratch local to the pool
    FreeLocal(value);
    return(true);
}
// Initializes update-pipeline metadata for one entity or association set:
// records the affected store tables, server-generated members and is-null
// condition properties, then sets up function-mapping translators and (for
// association sets) their AssociationSetMetadata.
private void InitializeEntitySet(EntitySetBase entitySetBase, MetadataWorkspace workspace)
{
    var mapping = (EntityContainerMapping)m_mappingCollection.GetMap(entitySetBase.EntityContainer);

    // make sure views have been generated for this sub-graph (trigger generation of the sub-graph
    // by retrieving a view for one of its components; not actually using the view here)
    if (mapping.HasViews)
    {
        m_mappingCollection.GetGeneratedView(entitySetBase, workspace);
    }

    var affectedTables = new Set <EntitySet>();

    // NOTE(review): 'mapping' is already dereferenced via HasViews above, so this
    // null guard can never take the false branch — confirm whether it is vestigial.
    if (null != mapping)
    {
        var isNullConditionColumns = new Set <EdmMember>();

        // find extent in the container mapping
        EntitySetBaseMapping setMapping;
        if (entitySetBase.BuiltInTypeKind == BuiltInTypeKind.EntitySet)
        {
            setMapping = mapping.GetEntitySetMapping(entitySetBase.Name);

            // Check for members that have result bindings in a function mapping. If a
            // function returns the member values, it indicates they are server-generated
            m_serverGenProperties.Unite(GetMembersWithResultBinding((EntitySetMapping)setMapping));
        }
        else if (entitySetBase.BuiltInTypeKind == BuiltInTypeKind.AssociationSet)
        {
            setMapping = mapping.GetAssociationSetMapping(entitySetBase.Name);
        }
        else
        {
            Debug.Fail("unexpected extent type " + entitySetBase.BuiltInTypeKind);
            throw new NotSupportedException();
        }

        // gather interesting tables, columns and properties from mapping fragments
        foreach (var mappingFragment in GetMappingFragments(setMapping))
        {
            affectedTables.Add(mappingFragment.TableSet);

            // get all property mappings to figure out if anything is server generated
            m_serverGenProperties.AddRange(FindServerGenMembers(mappingFragment));

            // get all columns participating in is null conditions
            isNullConditionColumns.AddRange(FindIsNullConditionColumns(mappingFragment));
        }

        if (0 < isNullConditionColumns.Count)
        {
            // gather is null condition properties based on is null condition columns
            foreach (var mappingFragment in GetMappingFragments(setMapping))
            {
                m_isNullConditionProperties.AddRange(FindPropertiesMappedToColumns(isNullConditionColumns, mappingFragment));
            }
        }
    }

    m_affectedTables.Add(entitySetBase, affectedTables.MakeReadOnly());

    InitializeFunctionMappingTranslators(entitySetBase, mapping);

    // for association sets, initialize AssociationSetMetadata if no function has claimed ownership
    // of the association yet
    if (entitySetBase.BuiltInTypeKind == BuiltInTypeKind.AssociationSet)
    {
        var associationSet = (AssociationSet)entitySetBase;
        if (!m_associationSetMetadata.ContainsKey(associationSet))
        {
            m_associationSetMetadata.Add(
                associationSet,
                new AssociationSetMetadata(
                    m_affectedTables[associationSet], associationSet, workspace));
        }
    }
}
// Solves expr == 0 for variable x, accumulating solutions into dst.
// Strategy, in order: trivial case, polynomial solver, structural
// decomposition by operator/function, variable-replacement search,
// trigonometric solver, and finally (optionally) Newton's method.
// compensateSolving indicates the call is re-solving a derived equation
// (bestReplacement - solution == 0) and relaxes/changes some paths.
internal static void Solve(Entity expr, VariableEntity x, Set dst, bool compensateSolving)
{
    // expr is literally x: the only root is 0
    if (expr == x)
    {
        dst.Add(0);
        return;
    }

    // Applies an attempt to downcast roots
    void DestinationAddRange(Set toAdd)
    {
        toAdd.FiniteApply(ent => TryDowncast(expr, x, ent));
        dst.AddRange(toAdd);
    }

    // First try to treat expr as a polynomial in x
    var polyexpr = expr.DeepCopy();
    Set res = PolynomialSolver.SolveAsPolynomial(polyexpr, x);
    if (res != null)
    {
        res.FiniteApply(e => e.InnerSimplify());
        DestinationAddRange(res);
        return;
    }

    if (expr.entType == Entity.EntType.OPERATOR)
    {
        switch (expr.Name)
        {
            case "mulf":
                // a * b == 0  <=>  a == 0 or b == 0
                Solve(expr.Children[0], x, dst);
                Solve(expr.Children[1], x, dst);
                return;
            case "divf":
                // a / b == 0: roots of the numerator minus roots of the denominator
                bool IsSetNumeric(Set a) => a.Select(piece => piece.LowerBound().Item1).All(MathS.CanBeEvaluated);
                var zeroNumerators = new Set();
                Solve(expr.Children[0], x, zeroNumerators);
                if (!IsSetNumeric(zeroNumerators))
                {
                    // Cannot compare symbolically — return the numerator roots as-is
                    dst.AddRange(zeroNumerators);
                    return;
                }
                var zeroDenominators = new Set();
                Solve(expr.Children[1], x, zeroDenominators);
                if (!IsSetNumeric(zeroDenominators))
                {
                    // NOTE(review): adds zeroNumerators (not zeroDenominators) here —
                    // appears to be a deliberate best-effort fallback, but verify
                    dst.AddRange(zeroNumerators);
                    return;
                }
                dst.AddRange((zeroNumerators & !zeroDenominators) as Set);
                return;
            case "powf":
                // a ^ b == 0  <=>  a == 0 (for the cases handled here)
                Solve(expr.Children[0], x, dst);
                return;
            case "minusf":
                // a - c == 0 with constant c: try to invert a around c
                if (expr.Children[1].FindSubtree(x) == null && compensateSolving)
                {
                    if (expr.Children[0] == x)
                    {
                        dst.Add(expr.Children[1]);
                        return;
                    }
                    // Count how many children of the minuend contain x;
                    // inversion only works when exactly one does
                    var subs = 0;
                    Entity lastChild = null;
                    foreach (var child in expr.Children[0].Children)
                    {
                        if (child.FindSubtree(x) != null)
                        {
                            subs += 1;
                            lastChild = child;
                        }
                    }
                    if (subs != 1)
                    {
                        break;
                    }
                    var resInverted = TreeAnalyzer.FindInvertExpression(expr.Children[0], expr.Children[1], lastChild);
                    foreach (var result in resInverted.FiniteSet())
                    {
                        Solve(lastChild - result, x, dst, compensateSolving: true);
                    }
                    return;
                }
                break;
        }
    }
    else if (expr.entType == Entity.EntType.FUNCTION)
    {
        // Invert the outermost function (e.g. sin, log) directly
        DestinationAddRange(TreeAnalyzer.InvertFunctionEntity(expr as FunctionEntity, 0, x));
        return;
    }

    // Here we generate a unique variable name: longest existing name + "quack"
    // cannot collide with any variable already present in expr
    var uniqVars = MathS.Utils.GetUniqueVariables(expr);
    uniqVars.Pieces.Sort((a, b) => ((Entity)b).Name.Length.CompareTo(((Entity)a).Name.Length));
    VariableEntity newVar = ((Entity)uniqVars.Pieces[0]).Name + "quack";

    // Here we find all possible replacements: candidate subtrees containing x
    // that we can substitute with newVar to simplify the equation
    var replacements = new List <Tuple <Entity, Entity> >();
    replacements.Add(new Tuple <Entity, Entity>(TreeAnalyzer.GetMinimumSubtree(expr, x), expr));
    foreach (var alt in expr.Alternate(4).FiniteSet())
    {
        if ((alt).FindSubtree(x) == null)
        {
            return; // in this case there is either 0 or +oo solutions
        }
        replacements.Add(new Tuple <Entity, Entity>(TreeAnalyzer.GetMinimumSubtree(alt, x), alt));
    }

    // Here we find one that has at least one solution
    foreach (var replacement in replacements)
    {
        Set solutions = null;
        // Replacing x with itself gains nothing
        if (replacement.Item1 == x)
        {
            continue;
        }
        var newExpr = replacement.Item2.DeepCopy();
        TreeAnalyzer.FindAndReplace(ref newExpr, replacement.Item1, newVar);
        solutions = newExpr.SolveEquation(newVar);
        if (!solutions.IsEmpty())
        {
            var bestReplacement = replacement.Item1;

            // Here we are trying to solve for this replacement
            Set newDst = new Set();
            foreach (var solution in solutions.FiniteSet())
            {
                var str = bestReplacement.ToString();
                // TODO: make a smarter comparison than just comparison of complexities of two expressions
                // The idea is
                // similarToPrevious = ((bestReplacement - solution) - expr).Simplify() == 0
                // But Simplify costs us too much time
                var similarToPrevious = (bestReplacement - solution).Complexity() >= expr.Complexity();
                if (!compensateSolving || !similarToPrevious)
                {
                    Solve(bestReplacement - solution, x, newDst, compensateSolving: true);
                }
            }
            DestinationAddRange(newDst);
            if (!dst.IsEmpty())
            {
                break;
            }
        }
    }

    // if no replacement worked, try trigonometry solver
    if (dst.IsEmpty())
    {
        var trigexpr = expr.DeepCopy();
        res = TrigonometricSolver.SolveLinear(trigexpr, x);
        if (res != null)
        {
            DestinationAddRange(res);
            return;
        }
    }

    // if nothing has been found so far, fall back to numeric Newton's method,
    // but only for single-variable expressions
    if (dst.IsEmpty() && MathS.Settings.AllowNewton)
    {
        Set allVars = new Set();
        TreeAnalyzer._GetUniqueVariables(expr, allVars);
        if (allVars.Count == 1)
        {
            DestinationAddRange(expr.SolveNt(x));
        }
    }
}
/// <summary>
/// Builds the command definition for <paramref name="commandTree"/>: compiles the tree into
/// store-level command definitions, prepares column-map generators used to materialize results,
/// and snapshots the command's parameter list.
/// </summary>
/// <param name="storeProviderFactory">Factory for the underlying store provider; must not be null.</param>
/// <param name="commandTree">Query or function command tree to compile; must not be null.</param>
/// <param name="interceptionContext">Interception context forwarded to the store provider; must not be null.</param>
/// <param name="resolver">Optional dependency resolver consulted first for <c>DbProviderServices</c>.</param>
/// <param name="bridgeDataReaderFactory">Optional factory override (testing hook).</param>
/// <param name="columnMapFactory">Optional factory override (testing hook).</param>
/// <exception cref="EntityCommandCompilationException">Wraps any catchable failure during compilation.</exception>
internal EntityCommandDefinition(
    DbProviderFactory storeProviderFactory,
    DbCommandTree commandTree,
    DbInterceptionContext interceptionContext,
    IDbDependencyResolver resolver = null,
    BridgeDataReaderFactory bridgeDataReaderFactory = null,
    ColumnMapFactory columnMapFactory = null)
{
    DebugCheck.NotNull(storeProviderFactory);
    DebugCheck.NotNull(commandTree);
    DebugCheck.NotNull(interceptionContext);

    _bridgeDataReaderFactory = bridgeDataReaderFactory ?? new BridgeDataReaderFactory();
    _columnMapFactory = columnMapFactory ?? new ColumnMapFactory();

    // Prefer provider services from the resolver; fall back to the factory's own services.
    _storeProviderServices =
        (resolver != null
             ? resolver.GetService<DbProviderServices>(storeProviderFactory.GetProviderInvariantName())
             : null)
        ?? storeProviderFactory.GetProviderServices();

    try
    {
        if (DbCommandTreeKind.Query == commandTree.CommandTreeKind)
        {
            // Next compile the plan for the command tree
            var mappedCommandList = new List<ProviderCommandInfo>();
            ColumnMap columnMap;
            int columnCount;
            PlanCompiler.Compile(commandTree, out mappedCommandList, out columnMap, out columnCount, out _entitySets);

            // A query yields a single result shape, so one constant column-map generator suffices.
            _columnMapGenerators = new IColumnMapGenerator[] { new ConstantColumnMapGenerator(columnMap, columnCount) };

            // Note: we presume that the first item in the ProviderCommandInfo is the root node;
            Debug.Assert(mappedCommandList.Count > 0, "empty providerCommandInfo collection and no exception?");
            // this shouldn't ever happen.

            // Then, generate the store commands from the resulting command tree(s)
            _mappedCommandDefinitions = new List<DbCommandDefinition>(mappedCommandList.Count);
            _mappedCommandReturnTypes = new List<RowType>();
            foreach (var providerCommandInfo in mappedCommandList)
            {
                var providerCommandDefinition = _storeProviderServices.CreateCommandDefinition(
                    providerCommandInfo.CommandTree, interceptionContext);
                _mappedCommandReturnTypes.Add(this.GetRowTypeFromCommandTree(providerCommandInfo.CommandTree));
                if (null == providerCommandDefinition)
                {
                    throw new ProviderIncompatibleException(Strings.ProviderReturnedNullForCreateCommandDefinition);
                }
                _mappedCommandDefinitions.Add(providerCommandDefinition);
            }
        }
        else
        {
            Debug.Assert(
                DbCommandTreeKind.Function == commandTree.CommandTreeKind,
                "only query and function command trees are supported");
            var entityCommandTree = (DbFunctionCommandTree)commandTree;

            // Retrieve mapping and metadata information for the function import.
            var mapping = GetTargetFunctionMapping(entityCommandTree);
            IList<FunctionParameter> returnParameters = entityCommandTree.EdmFunction.ReturnParameters;
            // A function import may declare multiple result sets; always reserve at least one generator.
            var resultSetCount = returnParameters.Count > 1 ? returnParameters.Count : 1;
            _columnMapGenerators = new IColumnMapGenerator[resultSetCount];
            var storeResultType = DetermineStoreResultType(mapping, 0, out _columnMapGenerators[0]);
            for (var i = 1; i < resultSetCount; i++)
            {
                DetermineStoreResultType(mapping, i, out _columnMapGenerators[i]);
            }

            // Copy over parameters (this happens through a more indirect route in the plan compiler, but
            // it happens nonetheless)
            var providerParameters = new List<KeyValuePair<string, TypeUsage>>();
            foreach (var parameter in entityCommandTree.Parameters)
            {
                providerParameters.Add(parameter);
            }

            // Construct store command tree usage.
            var providerCommandTree = new DbFunctionCommandTree(
                entityCommandTree.MetadataWorkspace, DataSpace.SSpace,
                mapping.TargetFunction, storeResultType, providerParameters);

            var storeCommandDefinition = _storeProviderServices.CreateCommandDefinition(providerCommandTree);
            _mappedCommandDefinitions = new List<DbCommandDefinition>(1) { storeCommandDefinition };

            // Record the entity set of the first result (if any) as a read-only singleton set.
            var firstResultEntitySet = mapping.FunctionImport.EntitySets.FirstOrDefault();
            if (firstResultEntitySet != null)
            {
                _entitySets = new Set<EntitySet>();
                _entitySets.Add(mapping.FunctionImport.EntitySets.FirstOrDefault());
                _entitySets.MakeReadOnly();
            }
        }

        // Finally, build a list of the parameters that the resulting command should have;
        var parameterList = new List<EntityParameter>();
        foreach (var queryParameter in commandTree.Parameters)
        {
            var parameter = CreateEntityParameterFromQueryParameter(queryParameter);
            parameterList.Add(parameter);
        }
        _parameters = new ReadOnlyCollection<EntityParameter>(parameterList);
    }
    catch (EntityCommandCompilationException)
    {
        // No need to re-wrap EntityCommandCompilationException
        throw;
    }
    catch (Exception e)
    {
        // we should not be wrapping all exceptions
        if (e.IsCatchableExceptionType())
        {
            // we don't want folks to have to know all the various types of exceptions that can
            // occur, so we just rethrow a CommandDefinitionException and make whatever we caught
            // the inner exception of it.
            throw new EntityCommandCompilationException(Strings.EntityClient_CommandDefinitionPreparationFailed, e);
        }
        throw;
    }
}
/// <summary>
/// Clusters a batch of sparse vectors, maintaining the model incrementally across calls.
/// On the first call (no dataset yet) the model is bootstrapped with k-means; on later calls
/// the new instances are assigned to existing centroids, the oldest numOutdated instances are
/// retired, and the number of clusters is adapted (split or join) against mQualThresh.
/// </summary>
/// <param name="numOutdated">Number of oldest instances to retire; must be 0 on the first call
/// and at most the current dataset size afterwards.</param>
/// <param name="batch">New instances to add; must not be null.</param>
/// <returns>The current clustering result; empty if the model was (re)set with no data.</returns>
public ClusteringResult Cluster(int numOutdated, IUnlabeledExampleCollection <SparseVector <double> > batch)
{
    Utils.ThrowException(batch == null ? new ArgumentNullException("batch") : null);
    Utils.ThrowException(numOutdated < 0 ? new ArgumentOutOfRangeException("numOutdated") : null);
    if (mDataset == null)
    {
        // initialize: nothing can be outdated before a model exists
        mLogger.Trace("Cluster", "Initializing ...");
        Utils.ThrowException(numOutdated > 0 ? new ArgumentOutOfRangeException("numOutdated") : null);
        //Utils.ThrowException(batch.Count == 0 ? new ArgumentValueException("batch") : null);
        if (batch.Count == 0) { return(new ClusteringResult()); }
        // bootstrap with at most mK clusters (fewer when the batch is smaller)
        kMeans(batch, Math.Min(mK, batch.Count));
        mDataset = new UnlabeledDataset <SparseVector <double> >(batch);
        // give every initial centroid a fresh topic identifier
        foreach (CentroidData centroid in mCentroids)
        {
            centroid.Tag = mTopicId++;
        }
        //OutputState();
    }
    else
    {
        // update clusters
        Utils.ThrowException(numOutdated > mDataset.Count ? new ArgumentOutOfRangeException("numOutdated") : null);
        if (numOutdated == 0 && batch.Count == 0) { return(GetClusteringResult()); }
        mLogger.Trace("Cluster", "Updating clusters ...");
        // assign new instances (indices offset by the current dataset size)
        double dummy;
        Assign(mCentroids, ModelUtils.GetTransposedMatrix(batch), batch.Count, /*offs=*/ mDataset.Count, out dummy);
        mDataset.AddRange(batch);
        // remove outdated instances: keep only items with index >= numOutdated
        foreach (CentroidData centroid in mCentroids)
        {
            foreach (int item in centroid.CurrentItems)
            {
                if (item >= numOutdated) { centroid.Items.Add(item); }
            }
            centroid.Update(mDataset);
            centroid.UpdateCentroidLen();
        }
        mDataset.RemoveRange(0, numOutdated);
        // drop empty centroids and shift surviving item indices down by numOutdated
        ArrayList <CentroidData> centroidsNew = new ArrayList <CentroidData>(mCentroids.Count);
        foreach (CentroidData centroid in mCentroids)
        {
            if (centroid.CurrentItems.Count > 0)
            {
                centroidsNew.Add(centroid);
                Set <int> tmp = new Set <int>();
                foreach (int idx in centroid.CurrentItems)
                {
                    tmp.Add(idx - numOutdated);
                }
                centroid.CurrentItems.Inner.SetItems(tmp);
            }
        }
        if (centroidsNew.Count == 0) // reset
        {
            mCentroids = null;
            mDataset = null;
            return(new ClusteringResult());
        }
        mCentroids = centroidsNew;
        // execute main loop
        kMeansMainLoop(mDataset, mCentroids);
        //OutputState();
    }
    // adjust k
    double minQual; // *** not used at the moment
    int minQualIdx;
    double qual = GetClustQual(out minQual, out minQualIdx);
    if (qual < mQualThresh)
    {
        while (qual < mQualThresh) // split cluster at minQualIdx
        {
            mLogger.Trace("Cluster", "Increasing k to {0} ...", mCentroids.Count + 1);
            mCentroids.Add(mCentroids[minQualIdx].Clone());
            mCentroids.Last.Tag = mTopicId++;
            kMeansMainLoop(mDataset, mCentroids);
            if (mCentroids.Last.CurrentItems.Count > mCentroids[minQualIdx].CurrentItems.Count)
            {
                // swap topic identifiers so the larger half keeps the original topic id
                object tmp = mCentroids.Last.Tag;
                mCentroids.Last.Tag = mCentroids[minQualIdx].Tag;
                mCentroids[minQualIdx].Tag = tmp;
            }
            qual = GetClustQual(out minQual, out minQualIdx);
            //OutputState();
        }
    }
    else if (numOutdated > 0)
    {
        while (qual > mQualThresh && mCentroids.Count > 1) // join clusters
        {
            mLogger.Trace("Cluster", "Decreasing k to {0} ...", mCentroids.Count - 1);
            // keep a copy so a join that degrades quality below threshold can be rolled back
            ArrayList <CentroidData> centroidsCopy = mCentroids.DeepClone();
            if (mCentroids.Count == 2) // create single cluster
            {
                // the bigger of the two clusters donates its topic identifier
                object topicId = mCentroids[0].CurrentItems.Count > mCentroids[1].CurrentItems.Count ? mCentroids[0].Tag : mCentroids[1].Tag;
                mCentroids = new ArrayList <CentroidData>();
                mCentroids.Add(new CentroidData());
                for (int i = 0; i < mDataset.Count; i++)
                {
                    mCentroids.Last.Items.Add(i);
                }
                mCentroids.Last.Tag = topicId;
                mCentroids.Last.Update(mDataset);
                mCentroids.Last.UpdateCentroidLen();
            }
            else
            {
                // merge the two most similar clusters into the first one
                int idx1, idx2;
                GetMostSimilarClusters(out idx1, out idx2);
                CentroidData c1 = mCentroids[idx1];
                CentroidData c2 = mCentroids[idx2];
                object topicId = c1.CurrentItems.Count > c2.CurrentItems.Count ? c1.Tag : c2.Tag;
                mCentroids.RemoveAt(idx2);
                c1.Items.AddRange(c1.CurrentItems);
                c1.Items.AddRange(c2.CurrentItems);
                c1.Tag = topicId;
                c1.Update(mDataset);
                c1.UpdateCentroidLen();
                kMeansMainLoop(mDataset, mCentroids);
            }
            qual = GetClustQual();
            if (qual >= mQualThresh)
            {
                mLogger.Trace("Cluster", "Accepted solution at k = {0}.", mCentroids.Count);
            }
            else
            {
                // quality dropped below threshold: roll back the join (loop then exits)
                mCentroids = centroidsCopy;
            }
            //OutputState();
        }
    }
    OutputState();
    return(GetClusteringResult());
}
/// <summary>
/// Removes the currently selected project files from their projects, after asking the user
/// whether to merely remove them or to also delete them from disk. Code-behind children are
/// removed together with their parent, and the affected projects are saved at the end.
/// </summary>
public override void DeleteMultipleItems()
{
    bool hasChildren = false;
    List <ProjectFile> files = new List <ProjectFile> ();
    // Collect the distinct projects touched so they can all be saved once at the end.
    Set <SolutionEntityItem> projects = new Set <SolutionEntityItem> ();
    foreach (ITreeNavigator node in CurrentNodes)
    {
        ProjectFile pf = (ProjectFile)node.DataItem;
        projects.Add(pf.Project);
        if (pf.HasChildren)
        {
            hasChildren = true;
        }
        files.Add(pf);
    }
    AlertButton removeFromProject = new AlertButton(GettextCatalog.GetString("_Remove from Project"), Gtk.Stock.Remove);
    string question, secondaryText;
    secondaryText = GettextCatalog.GetString("The Delete option permanently removes the file from your hard disk. " +
                                             "Click Remove from Project if you only want to remove it from your current solution.");
    // Pick the question text based on selection size and whether code-behind children are involved.
    if (hasChildren)
    {
        if (files.Count == 1)
        {
            question = GettextCatalog.GetString("Are you sure you want to remove the file {0} and " +
                                                "its code-behind children from project {1}?",
                                                Path.GetFileName(files[0].Name), files[0].Project.Name);
        }
        else
        {
            question = GettextCatalog.GetString("Are you sure you want to remove the selected files and " +
                                                "their code-behind children from the project?");
        }
    }
    else
    {
        if (files.Count == 1)
        {
            question = GettextCatalog.GetString("Are you sure you want to remove file {0} from project {1}?",
                                                Path.GetFileName(files[0].Name), files[0].Project.Name);
        }
        else
        {
            question = GettextCatalog.GetString("Are you sure you want to remove the selected files from the project?");
        }
    }
    AlertButton result = MessageService.AskQuestion(question, secondaryText,
                                                    AlertButton.Delete, AlertButton.Cancel, removeFromProject);
    // Anything other than "Remove from Project" or "Delete" (e.g. Cancel) aborts.
    if (result != removeFromProject && result != AlertButton.Delete)
    {
        return;
    }
    foreach (ProjectFile file in files)
    {
        Project project = file.Project;
        var inFolder = project.Files.GetFilesInVirtualPath(file.ProjectVirtualPath.ParentDirectory).ToList();
        if (inFolder.Count == 1 && inFolder [0] == file)
        {
            // This is the last project file in the folder. Make sure we keep
            // a reference to the folder, so it is not deleted from the tree.
            ProjectFile folderFile = new ProjectFile(project.BaseDirectory.Combine(file.ProjectVirtualPath.ParentDirectory));
            folderFile.Subtype = Subtype.Directory;
            project.Files.Add(folderFile);
        }
        if (file.HasChildren)
        {
            // NOTE(review): children are removed while iterating DependentChildren — confirm
            // that collection is not invalidated by project.Files.Remove(f).
            // Also, children are deleted from disk without the IsLink guard applied to the
            // parent below — confirm that is intended.
            foreach (ProjectFile f in file.DependentChildren)
            {
                project.Files.Remove(f);
                if (result == AlertButton.Delete)
                {
                    FileService.DeleteFile(f.Name);
                }
            }
        }
        project.Files.Remove(file);
        // Linked files are never deleted from disk, only removed from the project.
        if (result == AlertButton.Delete && !file.IsLink)
        {
            FileService.DeleteFile(file.Name);
        }
    }
    IdeApp.ProjectOperations.Save(projects);
}
/// <summary>
/// Converts the method body, choosing a loop direction (forward/backward) for the statements
/// of each outer while-loop and each loop variable. The choice is made by solving a min-cut
/// problem over the statement dependency graph; statements that end up needing a backward
/// direction while written as a forward loop are recorded in loopVarsToReverseInStatement.
/// Other outcomes (cloning / forward preference) are only logged when debug is set.
/// </summary>
/// <param name="outputs">Converted statements (passed through to the base conversion).</param>
/// <param name="inputs">Statements of the method body to convert.</param>
protected override void DoConvertMethodBody(IList <IStatement> outputs, IList <IStatement> inputs)
{
    // Per-node bookkeeping, indexed in the order nodes are added to the dependency graph.
    List <int> whileNumberOfNode = new List <int>();
    List <int> fusedCountOfNode = new List <int>();
    List <List <IStatement> > containersOfNode = new List <List <IStatement> >();
    // the code may have multiple while(true) loops, however these must be disjoint.
    // therefore we treat 'while' as one container, but give each loop a different 'while number'.
    int outerWhileCount = 0;
    int currentOuterWhileNumber = 0;
    int currentFusedCount = 0;
    // For each while number, the set of loop variables used by fused blocks inside it.
    List <Set <IVariableDeclaration> > loopVarsOfWhileNumber = new List <Set <IVariableDeclaration> >();
    // build the dependency graph
    var g = new DependencyGraph2(context, inputs, DependencyGraph2.BackEdgeHandling.Ignore,
        // entering a while statement
        delegate(IWhileStatement iws)
        {
            if (iws is IFusedBlockStatement)
            {
                if (iws.Condition is IVariableReferenceExpression)
                {
                    currentFusedCount++;
                }
            }
            else
            {
                outerWhileCount++;
                currentOuterWhileNumber = outerWhileCount;
            }
        },
        // leaving a while statement
        delegate(IWhileStatement iws)
        {
            if (iws is IFusedBlockStatement)
            {
                if (iws.Condition is IVariableReferenceExpression)
                {
                    currentFusedCount--;
                }
            }
            else
            {
                currentOuterWhileNumber = 0;
            }
        },
        delegate(IConditionStatement ics) { },
        delegate(IConditionStatement ics) { },
        // visiting a statement: record its while number, fused count, and containers,
        // and collect the loop variables of its fused for-loops.
        delegate(IStatement ist, int targetIndex)
        {
            int whileNumber = currentOuterWhileNumber;
            whileNumberOfNode.Add(whileNumber);
            fusedCountOfNode.Add(currentFusedCount);
            List <IStatement> containers = new List <IStatement>();
            LoopMergingTransform.UnwrapStatement(ist, containers);
            containersOfNode.Add(containers);
            for (int i = 0; i < currentFusedCount; i++)
            {
                if (containers[i] is IForStatement ifs)
                {
                    var loopVar = Recognizer.LoopVariable(ifs);
                    if (loopVarsOfWhileNumber.Count <= whileNumber)
                    {
                        // grow the list on demand up to the current while number
                        while (loopVarsOfWhileNumber.Count <= whileNumber)
                        {
                            loopVarsOfWhileNumber.Add(new Set <IVariableDeclaration>());
                        }
                    }
                    Set <IVariableDeclaration> loopVars = loopVarsOfWhileNumber[whileNumber];
                    loopVars.Add(loopVar);
                }
            }
        });
    var nodes = g.nodes;
    var dependencyGraph = g.dependencyGraph;
    // while number 0 means "not inside an outer while", so start at 1
    for (int whileNumber = 1; whileNumber < loopVarsOfWhileNumber.Count; whileNumber++)
    {
        foreach (var loopVar in loopVarsOfWhileNumber[whileNumber])
        {
            // Any statement (in the while loop) that has a forward descendant and a backward descendant will be cloned, so we want to minimize the number of such nodes.
            // The free variables in this problem are the loop directions at the leaf statements, since all other loop directions are forced by these.
            // We find the optimal labeling of the free variables by solving a min cut problem on a special network.
            // The network is constructed so that the cost of a cut is equal to the number of statements that will be cloned.
            // The network has 2 nodes for every statement: an in-node and an out-node.
            // For a non-leaf statement, there is a capacity 1 edge from the in-node to out-node. This edge is cut when the statement is cloned.
            // For a leaf statement, there is an infinite capacity edge in both directions, or equivalently a single node.
            // If statement A depends on statement B, then there is an infinite capacity edge from in-A to in-B, and from out-B to out-A,
            // representing the fact that cloning A requires cloning B, but not the reverse.
            // If a statement must appear with a forward loop, it is connected to the source.
            // If a statement must appear with a backward loop, it is connected to the sink.

            // construct a capacitated graph
            int inNodeStart = 0;
            int outNodeStart = inNodeStart + dependencyGraph.Nodes.Count;
            int sourceNode = outNodeStart + dependencyGraph.Nodes.Count;
            int sinkNode = sourceNode + 1;
            int cutNodeCount = sinkNode + 1;
            Func <NodeIndex, int> getInNode = node => node + inNodeStart;
            Func <NodeIndex, int> getOutNode = node => node + outNodeStart;
            IndexedGraph network = new IndexedGraph(cutNodeCount);
            const float infinity = 1000000f;
            // capacity[i] is the capacity of the i-th edge added to the network
            List <float> capacity = new List <float>();
            List <NodeIndex> nodesOfInterest = new List <NodeIndex>();
            foreach (var node in dependencyGraph.Nodes)
            {
                // only consider statements in this while loop
                if (whileNumberOfNode[node] != whileNumber)
                {
                    continue;
                }
                NodeIndex source = node;
                List <IStatement> containersOfSource = containersOfNode[source];
                bool hasLoopVar = containersOfSource.Any(container => container is IForStatement &&
                                                         Recognizer.LoopVariable((IForStatement)container) == loopVar);
                if (!hasLoopVar)
                {
                    continue;
                }
                nodesOfInterest.Add(node);
                IStatement sourceSt = nodes[source];
                var readAfterWriteEdges = dependencyGraph.EdgesOutOf(source).Where(edge => !g.isWriteAfterRead[edge]);
                bool isLeaf = true;
                int inNode = getInNode(node);
                int outNode = getOutNode(node);
                foreach (var target in readAfterWriteEdges.Select(dependencyGraph.TargetOf))
                {
                    List <IStatement> containersOfTarget = containersOfNode[target];
                    IStatement targetSt = nodes[target];
                    ForEachMatchingLoopVariable(containersOfSource, containersOfTarget, (loopVar2, afs, bfs) =>
                    {
                        if (loopVar2 == loopVar)
                        {
                            // dependency edges get infinite capacity: they must never be cut
                            int inTarget = getInNode(target);
                            int outTarget = getOutNode(target);
                            network.AddEdge(inTarget, inNode);
                            capacity.Add(infinity);
                            network.AddEdge(outNode, outTarget);
                            capacity.Add(infinity);
                            isLeaf = false;
                        }
                    });
                }
                if (isLeaf)
                {
                    if (debug)
                    {
                        log.Add($"loopVar={loopVar.Name} leaf {sourceSt}");
                    }
                    // a leaf's in-node and out-node are fused by infinite edges in both directions
                    network.AddEdge(inNode, outNode);
                    capacity.Add(infinity);
                    network.AddEdge(outNode, inNode);
                    capacity.Add(infinity);
                }
                else
                {
                    // cutting this unit-capacity edge corresponds to cloning the statement
                    network.AddEdge(inNode, outNode);
                    capacity.Add(1f);
                }
                int fusedCount = fusedCountOfNode[node];
                Direction desiredDirectionOfSource = GetDesiredDirection(loopVar, containersOfSource, fusedCount);
                if (desiredDirectionOfSource == Direction.Forward)
                {
                    if (debug)
                    {
                        log.Add($"loopVar={loopVar.Name} forward {sourceSt}");
                    }
                    // pin to the source side: must run forward
                    network.AddEdge(sourceNode, inNode);
                    capacity.Add(infinity);
                }
                else if (desiredDirectionOfSource == Direction.Backward)
                {
                    if (debug)
                    {
                        log.Add($"loopVar={loopVar.Name} backward {sourceSt}");
                    }
                    // pin to the sink side: must run backward
                    network.AddEdge(outNode, sinkNode);
                    capacity.Add(infinity);
                }
            }
            network.IsReadOnly = true;
            // compute the min cut
            // NOTE(review): declared as MinCut<NodeIndex, EdgeIndex> but constructed as
            // MinCut<EdgeIndex, EdgeIndex> — harmless only if both are aliases of int; confirm.
            MinCut <NodeIndex, EdgeIndex> mc = new MinCut <EdgeIndex, EdgeIndex>(network, e => capacity[e]);
            mc.Sources.Add(sourceNode);
            mc.Sinks.Add(sinkNode);
            Set <NodeIndex> sourceGroup = mc.GetSourceGroup();
            foreach (NodeIndex node in nodesOfInterest)
            {
                IStatement sourceSt = nodes[node];
                // membership of the in/out nodes in the source group encodes the direction choice
                bool forwardIn = sourceGroup.Contains(getInNode(node));
                bool forwardOut = sourceGroup.Contains(getOutNode(node));
                if (forwardIn != forwardOut)
                {
                    // the unit edge was cut: this statement would be cloned
                    if (debug)
                    {
                        log.Add($"loopVar={loopVar.Name} will clone {sourceSt}");
                    }
                }
                else if (forwardIn)
                {
                    if (debug)
                    {
                        log.Add($"loopVar={loopVar.Name} wants forward {sourceSt}");
                    }
                }
                else
                {
                    if (debug)
                    {
                        log.Add($"loopVar={loopVar.Name} wants backward {sourceSt}");
                    }
                    // the statement wants a backward direction; if its loop over loopVar is
                    // currently written forward, record it for reversal
                    var containers = containersOfNode[node];
                    bool isForwardLoop = true;
                    foreach (var container in containers)
                    {
                        if (container is IForStatement)
                        {
                            IForStatement ifs = (IForStatement)container;
                            if (Recognizer.LoopVariable(ifs) == loopVar)
                            {
                                isForwardLoop = Recognizer.IsForwardLoop(ifs);
                            }
                        }
                    }
                    if (isForwardLoop)
                    {
                        Set <IVariableDeclaration> loopVarsToReverse;
                        if (!loopVarsToReverseInStatement.TryGetValue(sourceSt, out loopVarsToReverse))
                        {
                            // TODO: re-use equivalent sets
                            loopVarsToReverse = new Set <IVariableDeclaration>();
                            loopVarsToReverseInStatement.Add(sourceSt, loopVarsToReverse);
                        }
                        loopVarsToReverse.Add(loopVar);
                    }
                }
            }
        }
    }
    base.DoConvertMethodBody(outputs, inputs);
}
/// <summary>
/// Imports a per-property Maya animation curve file and merges it into an AnimationClip asset
/// named after the clip (file name minus the property suffix). Rotation properties are handled
/// specially: all three rotation curve files are loaded, converted from Maya's radian Euler
/// angles into Unity quaternion component curves, and written as localRotation.x/y/z/w.
/// </summary>
/// <param name="context">Unity asset import context for the file being imported.</param>
public override void OnImportAsset(AssetImportContext context)
{
    string animationClipName = Path.GetFileNameWithoutExtension(context.assetPath);
    string propertyName = "";
    // Suffix in the source file name -> Unity transform property it maps to (index-aligned).
    string[] supportedProperties = { "_translateX", "_translateY", "_translateZ",
                                     "_rotateX", "_rotateY", "_rotateZ",
                                     "_scaleX", "_scaleY", "_scaleZ" };
    string[] propertyNames = { "localPosition.x", "localPosition.y", "localPosition.z",
                               "localEulerAngles.x", "localEulerAngles.y", "localEulerAngles.z",
                               "localScale.x", "localScale.y", "localScale.z" };
    //check if animation clip is supported
    int i = 0;
    foreach (var property in supportedProperties)
    {
        if (animationClipName.Contains(property))
        {
            animationClipName = animationClipName.Replace(property, ""); // to replace the specific text with blank
            propertyName = propertyNames[i];
            break;
        }
        i++;
    }
    // indices 3..5 are the rotation suffixes
    bool isRotateCurve = false;
    if (i > 2 && i < 6)
    {
        isRotateCurve = true;
    }
    if (propertyName == "")
    {
        //skip since it's not supported
        //context.SetMainAsset("AnimatorController", new GameObject());
        return;
    }
    // Reuse an existing clip asset if one was already created for this clip name.
    string clipName = Path.GetDirectoryName(context.assetPath) + "/" + animationClipName + ".anim";
    AnimationClip animationClip = AssetDatabase.LoadAssetAtPath <AnimationClip>(clipName);
    if (animationClip == null)
    {
        animationClip = new AnimationClip();
        animationClip.name = animationClipName;
    }
    //fill the animation curve
    if (isRotateCurve) //completely different path
    {
        // Load all three Euler-angle curves, regardless of which axis triggered this import.
        AnimationCurve animRotateX = new AnimationCurve();
        AnimationCurve animRotateY = new AnimationCurve();
        AnimationCurve animRotateZ = new AnimationCurve();
        LoadAnimationCurve(Path.GetDirectoryName(context.assetPath) + "/" + animationClipName + "_rotateX.animfa", animRotateX, false);
        LoadAnimationCurve(Path.GetDirectoryName(context.assetPath) + "/" + animationClipName + "_rotateY.animfa", animRotateY, false);
        LoadAnimationCurve(Path.GetDirectoryName(context.assetPath) + "/" + animationClipName + "_rotateZ.animfa", animRotateZ, false);
        AnimationCurve correctedAnimRotateX = new AnimationCurve();
        AnimationCurve correctedAnimRotateY = new AnimationCurve();
        AnimationCurve correctedAnimRotateZ = new AnimationCurve();
        AnimationCurve correctedAnimRotateW = new AnimationCurve();
        float radiansToAngles = 180.0f / Mathf.PI;
        // Union of all key times across the three axis curves.
        Set <float> set = new Set <float>();
        for (int k = 0; k < animRotateX.length; k++)
        {
            set.Add(animRotateX.keys[k].time);
        }
        for (int k = 0; k < animRotateY.length; k++)
        {
            set.Add(animRotateY.keys[k].time);
        }
        for (int k = 0; k < animRotateZ.length; k++)
        {
            set.Add(animRotateZ.keys[k].time);
        }
        foreach (var key in set)
        {
            // NOTE(review): `key.Key` assumes this Set<float> enumerates entries with a .Key
            // member rather than plain floats — confirm against the Set implementation in use.
            float time = key.Key;
            // Maya stores radians; convert and flip Y/Z for Unity's left-handed convention.
            float mayaRotX = animRotateX.Evaluate(time) * radiansToAngles;
            float mayaRotY = animRotateY.Evaluate(time) * radiansToAngles;
            float mayaRotZ = animRotateZ.Evaluate(time) * radiansToAngles;
            var flippedRotation = new Vector3(mayaRotX, -mayaRotY, -mayaRotZ);
            // Compose per-axis rotations into a single quaternion (X*Y*Z order).
            var qx = Quaternion.AngleAxis(flippedRotation.x, Vector3.right);
            var qy = Quaternion.AngleAxis(flippedRotation.y, Vector3.up);
            var qz = Quaternion.AngleAxis(flippedRotation.z, Vector3.forward);
            var unityRotationQuaternion = qx * qy * qz;
            correctedAnimRotateX.AddKey(time, unityRotationQuaternion.x);
            correctedAnimRotateY.AddKey(time, unityRotationQuaternion.y);
            correctedAnimRotateZ.AddKey(time, unityRotationQuaternion.z);
            correctedAnimRotateW.AddKey(time, unityRotationQuaternion.w);
        }
        animationClip.SetCurve("", typeof(Transform), "localRotation.x", correctedAnimRotateX);
        animationClip.SetCurve("", typeof(Transform), "localRotation.y", correctedAnimRotateY);
        animationClip.SetCurve("", typeof(Transform), "localRotation.z", correctedAnimRotateZ);
        animationClip.SetCurve("", typeof(Transform), "localRotation.w", correctedAnimRotateW);
        //This ensures a smooth interpolation
        animationClip.EnsureQuaternionContinuity();
    }
    else
    {
        AnimationCurve anim = new AnimationCurve();
        bool reverseValues = false;
        // NOTE(review): propertyName only ever holds values from propertyNames
        // ("localPosition.x", ...), never "_rotateX", so this branch appears unreachable —
        // and rotation suffixes take the quaternion path above anyway. Confirm intent.
        if (propertyName == "_rotateX")
        {
            reverseValues = true;
        }
        LoadAnimationCurve(context.assetPath, anim, reverseValues);
        animationClip.SetCurve("", typeof(Transform), propertyName, anim);
    }
    AssetDatabase.CreateAsset(animationClip, clipName);
    //string controllerName = Path.GetDirectoryName(context.assetPath) + "/" + animationClipName + ".controller";
    //var controller = UnityEditor.Animations.AnimatorController.CreateAnimatorControllerAtPathWithClip(controllerName, animationClip);
    //context.SetMainAsset("AnimatorController", new GameObject());
}
/// <summary>Adds <paramref name="item"/> by delegating to the underlying <c>Set</c>.</summary>
public void Add(T item) => Set.Add(item);
/// <summary>
/// Boolean members have a closed domain and are enumerated when domains are established i.e. (T, F) instead of (notNull).
/// Query Rewriting is exercised over every domain of the condition member. If the member contains not_null condition
/// for example, it cannot generate a view for partitions (member=T), (Member=F). For this reason we need to expand the cells
/// in a predefined situation (below) to include sub-fragments mapping individual elements of the closed domain.
/// Enums (a planned feature) need to be handled in a similar fashion.
///
/// Find booleans that are projected with a not_null condition.
/// Expand ALL cells where they are projected. Why? See Unit Test case NullabilityConditionOnBoolean5.es.
/// Validation will fail because it will not be able to validate rewritings for partitions on the 'other' cells.
/// </summary>
private void ExpandCells(List <Cell> cells)
{
    // Step 1: collect s-side boolean members that are projected AND restricted to not_null.
    var sSideMembersToBeExpanded = new Set <MemberPath>();
    foreach (Cell cell in cells)
    {
        //Find Projected members that are Boolean AND are mentioned in the Where clause with not_null condition
        foreach (var memberToExpand in cell.SQuery.GetProjectedMembers()
                                           .Where(member => IsBooleanMember(member))
                                           .Where(boolMember => cell.SQuery.GetConjunctsFromWhereClause()
                                                                    .Where(restriction => restriction.Domain.Values.Contains(Constant.NotNull))
                                                                    .Select(restriction => restriction.RestrictedMemberSlot.MemberPath).Contains(boolMember)))
        {
            sSideMembersToBeExpanded.Add(memberToExpand);
        }
    }
    //Foreach s-side members, find all c-side members it is mapped to
    // We need these because we need to expand all cells where the boolean candidate is projected or mapped member is projected, e.g:
    // (1) C[id, cdisc] WHERE d=true <=> T1[id, sdisc] WHERE sdisc=NOTNULL
    // (2) C[id, cdisc] WHERE d=false <=> T2[id, sdisc]
    // Here we need to know that because of T1.sdisc, we need to expand T2.sdisc.
    // This is done by tracking cdisc, and then seeing in cell 2 that it is mapped to T2.sdisc
    var cSideMembersForSSideExpansionCandidates = new Dictionary <MemberPath, Set <MemberPath> >();
    foreach (Cell cell in cells)
    {
        foreach (var sSideMemberToExpand in sSideMembersToBeExpanded)
        {
            // c-side members that occupy the same projected positions as the s-side member
            var cSideMembers = cell.SQuery.GetProjectedPositions(sSideMemberToExpand)
                                   .Select(pos => ((MemberProjectedSlot)cell.CQuery.ProjectedSlotAt(pos)).MemberPath);
            Set <MemberPath> cSidePaths = null;
            if (!cSideMembersForSSideExpansionCandidates.TryGetValue(sSideMemberToExpand, out cSidePaths))
            {
                cSidePaths = new Set <MemberPath>();
                cSideMembersForSSideExpansionCandidates[sSideMemberToExpand] = cSidePaths;
            }
            cSidePaths.AddRange(cSideMembers);
        }
    }
    // Expand cells that project members collected earlier with T/F conditions.
    // Iterate over a snapshot (ToArray) because expansion appends new cells to the list.
    foreach (Cell cell in cells.ToArray())
    {
        //Each member gets its own expansion. Including multiple condition candidates in one SQuery
        // "... <=> T[..] WHERE a=notnull AND b=notnull" means a and b get their own independent expansions
        // Note: this is not a cross-product
        foreach (var memberToExpand in sSideMembersToBeExpanded)
        {
            var mappedCSideMembers = cSideMembersForSSideExpansionCandidates[memberToExpand];
            //Check if member is projected in this cell.
            if (cell.SQuery.GetProjectedMembers().Contains(memberToExpand))
            {
                // Creating an additional cell can fail when the condition to be appended contradicts existing condition in the CellQuery
                // We don't add contradictions because they seem to cause unrelated problems in subsequent validation routines
                Cell resultCell = null;
                if (TryCreateAdditionalCellWithCondition(cell, memberToExpand, true /*condition value*/, ViewTarget.UpdateView /*s-side member*/, out resultCell))
                {
                    cells.Add(resultCell);
                }
                if (TryCreateAdditionalCellWithCondition(cell, memberToExpand, false /*condition value*/, ViewTarget.UpdateView /*s-side member*/, out resultCell))
                {
                    cells.Add(resultCell);
                }
            }
            else
            {
                //If the s-side member is not projected, see if the mapped C-side member(s) is projected
                foreach (var cMemberToExpand in cell.CQuery.GetProjectedMembers().Intersect(mappedCSideMembers))
                {
                    Cell resultCell = null;
                    if (TryCreateAdditionalCellWithCondition(cell, cMemberToExpand, true /*condition value*/, ViewTarget.QueryView /*c-side member*/, out resultCell))
                    {
                        cells.Add(resultCell);
                    }
                    if (TryCreateAdditionalCellWithCondition(cell, cMemberToExpand, false /*condition value*/, ViewTarget.QueryView /*c-side member*/, out resultCell))
                    {
                        cells.Add(resultCell);
                    }
                }
            }
        }
    }
}
public void RemoveMany()
{
    // Case-insensitive set: removals match members regardless of casing.
    var set1 = new Set<string>(StringComparer.InvariantCultureIgnoreCase);
    foreach (string member in new[] { "foo", "Eric", "Clapton", null, "fudd", "elmer" })
    {
        set1.Add(member);
    }

    // "FOO", "eric" and null hit existing members; "jasmine" does not -> 3 removals.
    string[] toRemove = { "FOO", "jasmine", "eric", null };
    int count = set1.RemoveMany(toRemove);
    Assert.AreEqual(3, count);
    InterfaceTests.TestReadWriteCollectionGeneric(set1, new string[] { "Clapton", "elmer", "fudd" }, false);

    // Removing a set from itself removes every member and leaves it empty.
    set1.Clear();
    foreach (string member in new[] { "foo", "Eric", "Clapton", null, "fudd" })
    {
        set1.Add(member);
    }
    count = set1.RemoveMany(set1);
    Assert.AreEqual(5, count);
    Assert.AreEqual(0, set1.Count);
}
public void SetTest_0()
{
    // Seed with scattered values, then fill in the whole 0..20 range.
    foreach (int seed in new[] { 10, 5, 15, 2, 7, 13, 18 })
    {
        set1.Add(seed);
    }
    for (int value = 0; value <= 20; ++value)
    {
        set1.Add(value);
    }

    // Every value in 0..20 must now be present.
    for (int value = 0; value <= 20; ++value)
    {
        Assert.IsTrue(set1.Search(value));
    }

    // Delete a mix of values, preserving the original deletion order.
    foreach (int victim in new[] { 9, 8, 5, 6, 7, 1, 2, 3, 15, 14, 10 })
    {
        set1.Delete(victim);
    }

    // The remaining probed values must still be found.
    foreach (int survivor in new[] { 13, 11, 12, 18, 16, 17, 19, 20 })
    {
        Assert.IsTrue(set1.Search(survivor));
    }
}
/// <summary>
/// Renames the expressions of this segment limit under the parallel assignment
/// <paramref name="sourceToTargets"/> (each source variable may map to several target names).
/// Constants are kept as-is; variable and variable+constant expressions are re-emitted once
/// per target name. Expressions whose variable has no targets are dropped, except for the
/// a[p++] special case handled below.
/// </summary>
/// <param name="sourceToTargets">Map from pre-assignment variables to their post-assignment names.</param>
/// <param name="convert">Converts a variable to an expression (used to pattern-match the special case).</param>
/// <param name="decoder">Decoder used to recognize var+const expressions.</param>
/// <returns>A new <c>SegmentLimit</c> over the renamed expressions; conditionality is preserved.</returns>
public SegmentLimit <Variable> AssignInParallel <Expression>(Dictionary <Variable, FList <Variable> > sourceToTargets, Converter <Variable, Expression> convert, IExpressionDecoder <Variable, Expression> decoder)
{
    #region Contracts
    Contract.Requires(sourceToTargets != null);
    Contract.Requires(convert != null);
    Contract.Requires(decoder != null);
    Contract.Ensures(Contract.Result <SegmentLimit <Variable> >() != null);
    #endregion
    var newSet = new Set <NormalizedExpression <Variable> >();
    foreach (var x in expressions)
    {
        Contract.Assume(x != null);
        FList <Variable> targets;
        int value;
        Variable var;
        if (x.IsConstant(out value))
        {
            // constants survive renaming unchanged
            newSet.Add(x);
        }
        else if (x.IsVariable(out var))
        {
            // a plain variable is replaced by each of its target names
            if (sourceToTargets.TryGetValue(var, out targets))
            {
                Contract.Assume(targets != null);
                foreach (var newName in targets.GetEnumerable())
                {
                    newSet.Add(NormalizedExpression <Variable> .For(newName));
                }
            }
        }
        else if (x.IsAddition(out var, out value))
        {
            // var+const: rename the variable part, keep the constant offset
            if (sourceToTargets.TryGetValue(var, out targets))
            {
                Contract.Assume(targets != null);
                foreach (var newName in targets.GetEnumerable())
                {
                    newSet.Add(NormalizedExpression <Variable> .For(newName, value));
                }
            }
            // This is a special case to handle renaming for a[p++] = ...
            // We have (var + value) --> var
            Variable source;
            if (IsATarget(var, sourceToTargets, out source))
            {
                Variable v;
                int k;
                // keep `var` when its renaming source is exactly var + value
                if (decoder.TryMatchVarPlusConst(convert(source), out v, out k) && v.Equals(var) && k == value)
                {
                    newSet.Add(NormalizedExpression <Variable> .For(var));
                }
            }
        }
    }
    return(new SegmentLimit <Variable>(newSet, this.IsConditional));
}
/// <summary>
/// Recursively searches the specified directory for the search files, down to a maximum depth.
/// </summary>
/// <param name="p_strPath">The path of the directory to recursively search.</param>
/// <param name="p_strSearchFiles">The file patterns to search for when auto-detecting.</param>
/// <param name="p_intMaxDepth">The maximum depth to search to; -1 means unbounded.</param>
/// <param name="p_intDepth">The current search depth.</param>
/// <returns>The confirmed installation directory, or null when none is found, access is
/// denied, or the task is being cancelled.</returns>
protected string SearchToDepth(string p_strPath, string[] p_strSearchFiles, Int32 p_intMaxDepth, Int32 p_intDepth)
{
    bool pastMaxDepth = (p_intMaxDepth > -1) && (p_intDepth > p_intMaxDepth);
    if (pastMaxDepth || m_setSkipFolders.Contains(p_strPath))
    {
        return null;
    }
    ItemMessage = p_strPath;
    if (!m_setSearchedFolders.Contains(p_strPath))
    {
        m_setSearchedFolders.Add(p_strPath);
        foreach (string searchPattern in p_strSearchFiles)
        {
            if (Status == TaskStatus.Cancelling)
            {
                return null;
            }
            try
            {
                foreach (string match in Directory.GetFiles(p_strPath, searchPattern, SearchOption.TopDirectoryOnly))
                {
                    string candidate = Path.GetDirectoryName(match);
                    if (ConfirmFoundInstallationPath(candidate))
                    {
                        return candidate;
                    }
                }
            }
            catch (UnauthorizedAccessException)
            {
                //we don't have access to the path we are trying to search, so let's bail
                return null;
            }
        }
    }
    try
    {
        foreach (string childDirectory in Directory.GetDirectories(p_strPath))
        {
            if (Status == TaskStatus.Cancelling)
            {
                return null;
            }
            // Skip folders whose names start with "$" (e.g. system folders).
            if (Path.GetFileName(childDirectory).StartsWith("$"))
            {
                continue;
            }
            string found = SearchToDepth(childDirectory, p_strSearchFiles, p_intMaxDepth, p_intDepth + 1);
            if (!String.IsNullOrEmpty(found))
            {
                return found;
            }
        }
    }
    catch (UnauthorizedAccessException)
    {
        //we don't have access to the path we are trying to search, so let's bail
        return null;
    }
    return null;
}
/// <summary>
/// Greedily selects DTE (dual-tile encoding) pairs so that this file's encoded
/// sections fit within the layout's available space.
/// </summary>
/// <param name="replacements">Candidate strings that may be DTE-encoded.</param>
/// <param name="currentPairs">Pairs already chosen for other files; they are reused first.</param>
/// <param name="dteBytes">Pool of free byte values available for new pairs (consumed as pairs are added).</param>
/// <param name="worker">Optional worker used to report remaining-bytes progress; may be null.</param>
/// <returns>
/// The set of pairs (including <paramref name="currentPairs"/>) that makes the file fit;
/// an empty set if no DTE is needed; or null if the byte pool ran out before the file fit.
/// </returns>
public virtual Set <KeyValuePair <string, byte> > GetPreferredDTEPairs(Set <string> replacements, Set <KeyValuePair <string, byte> > currentPairs, Stack <byte> dteBytes, System.ComponentModel.BackgroundWorker worker)
{
    // Clone the sections (encoding mutates them, so work on a copy)
    var secs = GetCopyOfSections();

    IList <byte> bytes = GetSectionByteArrays(secs, SelectedTerminator, CharMap, CompressionAllowed, DteAllowed).Join();
    Set <KeyValuePair <string, byte> > result = new Set <KeyValuePair <string, byte> >();

    // Determine if we even need to do DTE at all
    int bytesNeeded = bytes.Count - (Layout.Size - DataStart);
    if (bytesNeeded <= 0)
    {
        return(result);
    }

    // Take the pairs that were already used for other files and encode this file with them
    result.AddRange(currentPairs);
    TextUtilities.DoDTEEncoding(secs, DteAllowed, PatcherLib.Utilities.Utilities.DictionaryFromKVPs(result));
    bytes = GetSectionByteArrays(secs, SelectedTerminator, CharMap, CompressionAllowed, DteAllowed).Join();

    // If enough bytes were saved with the existing pairs, no need to look further
    bytesNeeded = bytes.Count - (Layout.Size - DataStart);
    if (bytesNeeded <= 0)
    {
        return(result);
    }

    // Terminator marker, e.g. "{0xFE}", appended after each text entry so pair
    // counting sees entry boundaries. NOTE(review): uses field 'selectedTerminator'
    // while the encoding calls use property 'SelectedTerminator' — presumably the
    // same value; confirm.
    string terminatorString = string.Format("{{0x{0:X2}", selectedTerminator) + "}";

    // Otherwise, get all the strings that can be DTE encoded
    StringBuilder sb = new StringBuilder(Layout.Size);
    for (int i = 0; i < secs.Count; i++)
    {
        if (DteAllowed[i])
        {
            secs[i].ForEach(t => sb.Append(t).Append(terminatorString));
        }
    }

    // ... determine pair frequency
    var dict = TextUtilities.GetPairAndTripleCounts(sb.ToString(), replacements);

    // Sort the list by count (descending: most frequent candidate first)
    var l = new List <KeyValuePair <string, int> >(dict);
    l.Sort((a, b) => b.Value.CompareTo(a.Value));

    // Go through each one, encode the file with it, and see if we're below the limit.
    // (An earlier, more exhaustive per-byte search over all candidates was removed
    // in favor of this simple greedy loop: always take the current most frequent pair.)
    while (bytesNeeded > 0 && l.Count > 0 && dteBytes.Count > 0)
    {
        // Assign the next free byte to the most frequent pair and re-encode.
        result.Add(new KeyValuePair <string, byte>(l[0].Key, dteBytes.Pop()));
        TextUtilities.DoDTEEncoding(secs, DteAllowed, PatcherLib.Utilities.Utilities.DictionaryFromKVPs(result));
        bytes = GetSectionByteArrays(secs, SelectedTerminator, CharMap, CompressionAllowed, DteAllowed).Join();
        bytesNeeded = bytes.Count - (Layout.Size - DataStart);

        if (bytesNeeded > 0)
        {
            if (worker != null)
            {
                worker.ReportProgress(0, new ProgressForm.FileProgress {
                    File = this, State = ProgressForm.TaskState.Starting, Task = ProgressForm.Task.CalculateDte, BytesLeft = bytesNeeded
                });
            }

            // Still too big: recount pair frequencies on the partially-encoded text,
            // re-sort, and reset the sections for the next encoding round.
            StringBuilder sb2 = new StringBuilder(Layout.Size);
            for (int i = 0; i < secs.Count; i++)
            {
                if (DteAllowed[i])
                {
                    secs[i].ForEach(t => sb2.Append(t).Append(terminatorString));
                }
            }
            l = new List <KeyValuePair <string, int> >(TextUtilities.GetPairAndTripleCounts(sb2.ToString(), replacements));
            l.Sort((a, b) => b.Value.CompareTo(a.Value));
            secs = GetCopyOfSections();
        }
    }

    // Ran out of available pairs and still don't have enough space --> error
    if (bytesNeeded > 0)
    {
        return(null);
    }

    return(result);
}
/// <summary>
/// Installs the given morpho-forms on this entry: collects each form's ending
/// pointer into <c>_MorphoFormEndings</c>, and groups morpho-attributes by
/// upper-cased ending into <c>_MorphoFormEndingUpperAndMorphoAttributes</c>.
/// Uses the shared <c>tempBufferHS</c>/<c>tempBufferDict</c> scratch buffers,
/// which are cleared before returning. Not thread-safe (shared scratch buffers) —
/// NOTE(review): presumably called only from a single loading thread; confirm.
/// </summary>
/// <param name="morphoForms">Forms to install; an empty list resets the arrays to the shared empty singletons.</param>
internal void SetMorphoForms(List <MorphoFormNative> morphoForms)
{
    if (morphoForms.Count != 0)
    {
        //---_MorphoForms = morphoForms.ToArray();
        LinkedList <MorphoAttributeEnum> morphoAttributes = null;
        for (int i = 0, len = morphoForms.Count; i < len; i++)
        {
            var morphoForm = morphoForms[i];

            #region [.morpho-form endings.]
            // Collect the raw ending pointer (deduplicated by the hash-set buffer).
            tempBufferHS.Add((IntPtr)morphoForm.Ending);
            #endregion

            #region [.MorphoFormEndingUpper-&-MorphoAttribute.]
            // Group attributes by upper-cased ending; linked lists are pooled
            // via PopLinkedList/PushLinkedList to avoid allocations.
            var endingUpperPtr = (IntPtr)morphoForm.EndingUpper;
            if (!tempBufferDict.TryGetValue(endingUpperPtr, ref morphoAttributes))
            {
                morphoAttributes = PopLinkedList();
                tempBufferDict.Add(endingUpperPtr, morphoAttributes);
            }
            var morphoAttribute = MorphoAttributePair.GetMorphoAttribute(this, morphoForm);
            morphoAttributes.AddLast(morphoAttribute);
            #endregion
        }

        #region [.morpho-form endings.]
        // Copy the collected ending pointers into the char* array.
        _MorphoFormEndings = new char *[tempBufferHS.Count];
        fixed(char **morphoFormEndingsBase = _MorphoFormEndings)
        {
            var it = tempBufferHS.GetEnumerator();
            for (var i = 0; it.MoveNext(); i++)
            {
                *(morphoFormEndingsBase + i) = (char *)it.Current;
            }
            // (Manual enumerator loop kept instead of foreach — original equivalent
            //  foreach version was removed as dead commented code.)
        }
        tempBufferHS.Clear();
        #endregion

        #region [.MorphoFormEndingUpper-&-MorphoAttribute.]
        // Materialize the (upper-ending, attributes) groups and return the pooled
        // linked lists for reuse.
        _MorphoFormEndingUpperAndMorphoAttributes = new MorphoFormEndingUpperAndMorphoAttribute[tempBufferDict.Count];
        var it2 = tempBufferDict.GetEnumerator();
        for (var i = 0; it2.MoveNext(); i++)
        {
            _MorphoFormEndingUpperAndMorphoAttributes[i] = new MorphoFormEndingUpperAndMorphoAttribute(
                it2.Current_IntPtr, it2.Current_Value);
            PushLinkedList(it2.Current_Value);
        }
        // (Equivalent foreach-over-dictionary version removed as dead commented code.)
        tempBufferDict.Clear();
        #endregion
    }
    else
    {
        //_MorphoForms = EMPTY_MORPHOFORM;
        // No forms: point at the shared empty arrays rather than allocating.
        _MorphoFormEndings = EMPTY_ENDINGS;
        _MorphoFormEndingUpperAndMorphoAttributes = EMPTY_MFUEMA;
    }
}
/// -----------------------------------------------------------------------------------
/// <summary>
/// Replace the user prompt with the text the user typed. This method is called from
/// the views code when the user prompt is edited.
/// </summary>
/// <param name="vwsel">Current selection in rootbox where this prop was updated</param>
/// <param name="hvo">Hvo of the paragraph/string/segment whose contents are being
/// changed</param>
/// <param name="tag">Tag (must be SimpleRootSite.kTagUserPrompt)</param>
/// <param name="frag">Owning flid of the text/object that owns the paragraph/string/
/// segment whose user prompt is being replaced with typed text</param>
/// <param name="tssVal">Text the user just typed</param>
/// <returns>possibly modified ITsString.</returns>
/// <remarks>The return value is currently ignored in production code, but we use it
/// in our tests.</remarks>
/// -----------------------------------------------------------------------------------
public override ITsString UpdateProp(IVwSelection vwsel, int hvo, int tag, int frag, ITsString tssVal)
{
    Debug.Assert(tag == SimpleRootSite.kTagUserPrompt, "Got an unexpected tag");
    Debug.Assert(vwsel != null, "Got a null selection!");
    Debug.Assert(vwsel.IsValid, "Got an invalid selection!");

    IVwRootBox rootbox = vwsel.RootBox;

    // If a (typically Chinese) character composition is in progress, replacing the prompt will
    // destroy the selection and end the composition, causing weird typing problems (TE-8267).
    // Ending the composition does another Commit, which ensures that this will eventually be
    // called when there is NOT a composition in progress.
    if (rootbox.IsCompositionInProgress)
    {
        return(tssVal);
    }

    // Remove the UserPrompt pseudo-property from the text the user typed.
    // When appropriate also ensure the correct writing system.
    // The correct WS is m_wsDefault in the view constructor.
    ITsStrBldr bldr = tssVal.GetBldr();
    if (frag != SegmentTags.kflidFreeTranslation)
    {
        bldr.SetIntPropValues(0, bldr.Length, (int)FwTextPropType.ktptWs, (int)FwTextPropVar.ktpvDefault, m_wsDefault);
    }

    // Delete the user prompt property from the string (TE-3994)
    bldr.SetIntPropValues(0, bldr.Length, SimpleRootSite.ktptUserPrompt, -1, -1);
    tssVal = bldr.GetString();

    // Get information about current selection
    int cvsli = vwsel.CLevels(false);
    cvsli--;     // CLevels includes the string property itself, but AllTextSelInfo doesn't need it.
    int ihvoRoot;
    int tagTextProp_Ignore;
    int cpropPrevious;
    int ichAnchor;
    int ichEnd;
    int ihvoEnd;
    // Prior to the Commit in selection changed which causes this UpdateProp to be called,
    // earlier selection changed code has expanded the selection (because it is in a user prompt)
    // to the whole prompt. It is therefore a range selection, and the value of fAssocPrev we got
    // is useless.
    bool fAssocPrev_Ignore;
    int ws;
    ITsTextProps ttp;
    SelLevInfo[] rgvsli = SelLevInfo.AllTextSelInfo(vwsel, cvsli, out ihvoRoot, out tagTextProp_Ignore, out cpropPrevious, out ichAnchor, out ichEnd, out ws, out fAssocPrev_Ignore, out ihvoEnd, out ttp);

    int tagTextProp;
    ITsTextProps props = null;
    if (frag == SegmentTags.kflidFreeTranslation)
    {
        // Free (back) translation of a segment.
        // If the length is zero...we need to suppress replacing the comment with a prompt.
        if (tssVal.Length == 0)
        {
            m_hvoOfSegmentWhoseBtPromptIsToBeSupressed = hvo;
        }
        ISegment seg = Cache.ServiceLocator.GetInstance <ISegmentRepository>().GetObject(hvo);
        if (seg.FreeTranslation.get_String(BackTranslationWS).Length == 0)
        {
            // Undo needs to unset suppressing the comment prompt.
            Cache.ActionHandlerAccessor.AddAction(new UndoSuppressBtPrompt(this, seg));
        }
        ws = BackTranslationWS;
        tagTextProp = frag;
        seg.FreeTranslation.set_String(ws, tssVal);
        // Notify the view that the owning paragraph changed so it redraws.
        rootbox.PropChanged(seg.Paragraph.Owner.Hvo, StTextTags.kflidParagraphs, seg.Paragraph.IndexInOwner, 1, 1);
    }
    else
    {
        ReplacePromptUndoAction undoAction = new ReplacePromptUndoAction(hvo, rootbox, m_updatedPrompts);
        if (m_cache.ActionHandlerAccessor != null)
        {
            // Restore the selection first on undo, then undo the prompt replacement.
            m_cache.ActionHandlerAccessor.AddAction(new UndoSelectionAction(rootbox.Site, true, vwsel));
            m_cache.ActionHandlerAccessor.AddAction(undoAction);
        }

        // Mark the user prompt as having been updated - will not show prompt again.
        // Note: ReplacePromptUndoAction:Undo removes items from the Set.
        m_updatedPrompts.Add(hvo);

        // Replace the ITsString in the paragraph or translation
        props = StyleUtils.CharStyleTextProps(null, m_wsDefault);
        if (frag == CmTranslationTags.kflidTranslation)
        {
            ICmTranslation trans = Cache.ServiceLocator.GetInstance <ICmTranslationRepository>().GetObject(hvo);
            trans.Translation.set_String(m_wsDefault, tssVal);
            undoAction.ParaHvo = trans.Owner.Hvo;
            ws = BackTranslationWS;
            tagTextProp = frag;
        }
        else
        {
            IStTxtPara para = Cache.ServiceLocator.GetInstance <IStTxtParaRepository>().GetObject(hvo);
            para.Contents = tssVal;
            undoAction.ParaHvo = hvo;
            ws = 0;
            tagTextProp = StTxtParaTags.kflidContents;
        }
        // Do a fake propchange to update the prompt
        rootbox.PropChanged(undoAction.ParaHvo, StParaTags.kflidStyleRules, 0, 1, 1);
    }

    // Now request a selection at the end of the text that was just put in.
    rootbox.Site.RequestSelectionAtEndOfUow(rootbox, ihvoRoot, cvsli, rgvsli, tagTextProp, cpropPrevious, ichEnd, ws, true, props);
    return(tssVal);
}
/// <summary>
/// Entry point for command-line conformance testing: parses the arguments, builds the
/// implementation stepper and (product) model program via reflection factory methods,
/// optionally loads test suites and FSMs, configures a <c>ConformanceTester</c>, and runs it.
/// </summary>
/// <param name="args">Raw command-line arguments, parsed into <c>ConfTesterCommandLineSettings</c>.</param>
/// <exception cref="ModelProgramUserException">Thrown for any user-level configuration error
/// (bad factory method, unreadable test suite/FSM, no model given, or a tester failure).</exception>
public static void RunWithCommandLineArguments(string[] args)
{
    //System.Diagnostics.Debugger.Break();
    ConformanceTester confTester = null;
    try
    {
        ConfTesterCommandLineSettings settings = new ConfTesterCommandLineSettings();
        if (!Parser.ParseArgumentsWithUsage(args, settings))
        {
            // Parsing failed; usage has already been printed.
            //Console.ReadLine();
            return;
        }

        #region load the libraries
        List <Assembly> libs = new List <Assembly>();
        try
        {
            if (settings.reference != null)
            {
                foreach (string l in settings.reference)
                {
                    libs.Add(System.Reflection.Assembly.LoadFrom(l));
                }
            }
        }
        catch (Exception e)
        {
            throw new ModelProgramUserException(e.Message);
        }
        #endregion

        #region create the implementation stepper using the factory method
        string implStepperMethodName;
        string implStepperClassName;
        ReflectionHelper.SplitFullMethodName(settings.iut, out implStepperClassName, out implStepperMethodName);
        Type implStepperType = ReflectionHelper.FindType(libs, implStepperClassName);
        MethodInfo implStepperMethod = ReflectionHelper.FindMethod(implStepperType, implStepperMethodName, Type.EmptyTypes, typeof(IStepper));
        IStepper implStepper = null;
        try
        {
            // Static, parameterless factory method returning the IUT stepper.
            implStepper = (IStepper)implStepperMethod.Invoke(null, null);
        }
        catch (Exception e)
        {
            throw new ModelProgramUserException("Invocation of '" + settings.iut + "' failed: " + e.ToString());
        }
        #endregion

        #region create a model program for each model using the factory method and compose into product
        string mpMethodName;
        string mpClassName;
        ModelProgram mp = null;
        if (settings.model != null && settings.model.Length > 0)
        {
            ReflectionHelper.SplitFullMethodName(settings.model[0], out mpClassName, out mpMethodName);
            Type mpType = ReflectionHelper.FindType(libs, mpClassName);
            MethodInfo mpMethod = ReflectionHelper.FindMethod(mpType, mpMethodName, Type.EmptyTypes, typeof(ModelProgram));
            try
            {
                mp = (ModelProgram)mpMethod.Invoke(null, null);
            }
            catch (Exception e)
            {
                throw new ModelProgramUserException("Invocation of '" + settings.model[0] + "' failed: " + e.ToString());
            }
            // Compose any additional models pairwise into a product model program.
            for (int i = 1; i < settings.model.Length; i++)
            {
                ReflectionHelper.SplitFullMethodName(settings.model[i], out mpClassName, out mpMethodName);
                mpType = ReflectionHelper.FindType(libs, mpClassName);
                mpMethod = ReflectionHelper.FindMethod(mpType, mpMethodName, Type.EmptyTypes, typeof(ModelProgram));
                ModelProgram mp2 = null;
                try
                {
                    mp2 = (ModelProgram)mpMethod.Invoke(null, null);
                }
                catch (Exception e)
                {
                    throw new ModelProgramUserException("Invocation of '" + settings.model[i] + "' failed: " + e.ToString());
                }
                mp = new ProductModelProgram(mp, mp2);
            }
        }
        #endregion

        #region load the test cases if any
        Sequence <Sequence <CompoundTerm> > testcases = Sequence <Sequence <CompoundTerm> > .EmptySequence;
        if (!String.IsNullOrEmpty(settings.testSuite))
        {
            try
            {
                // A test suite is a term whose arguments are test cases (sequences of actions).
                System.IO.StreamReader testSuiteReader = new System.IO.StreamReader(settings.testSuite);
                string testSuiteAsString = testSuiteReader.ReadToEnd();
                testSuiteReader.Close();
                CompoundTerm testSuite = (CompoundTerm)Term.Parse(testSuiteAsString);
                foreach (CompoundTerm testCaseTerm in testSuite.Arguments)
                {
                    Sequence <CompoundTerm> testCase = testCaseTerm.Arguments.Convert <CompoundTerm>(delegate(Term t) { return((CompoundTerm)t); });
                    testcases = testcases.AddLast(testCase);
                }
            }
            catch (Exception e)
            {
                throw new ModelProgramUserException("Cannot create test suite: " + e.Message);
            }
        }
        #endregion

        #region load the fsms if any
        Dictionary <string, FSM> fsms = new Dictionary <string, FSM>();
        if (settings.fsm != null && settings.fsm.Length > 0)
        {
            try
            {
                foreach (string fsmFile in settings.fsm)
                {
                    System.IO.StreamReader fsmReader = new System.IO.StreamReader(fsmFile);
                    string fsmAsString = fsmReader.ReadToEnd();
                    fsmReader.Close();
                    fsms[fsmFile] = FSM.FromTerm(CompoundTerm.Parse(fsmAsString));
                }
            }
            catch (Exception e)
            {
                throw new ModelProgramUserException("Cannot create fsm: " + e.Message);
            }
        }
        #endregion

        // Must have at least one source of behavior to test against.
        if (mp == null && testcases.IsEmpty && fsms.Count == 0)
        {
            throw new ModelProgramUserException("No model, fsm, or test suite was given.");
        }

        // Fold each loaded FSM into the (product) model program.
        if (fsms.Count > 0)
        {
            foreach (string fsmName in fsms.Keys)
            {
                ModelProgram fsmmp = new FsmModelProgram(fsms[fsmName], fsmName);
                if (mp == null)
                {
                    mp = fsmmp;
                }
                else
                {
                    mp = new ProductModelProgram(mp, fsmmp);
                }
            }
        }

        #region create the model stepper
        IStrategy ms;
        if (!testcases.IsEmpty)
        {
            // Explicit test cases drive the run.
            ms = new TestSuiteStepper(settings.startTestAction, testcases, mp);
        }
        else
        {
            // Otherwise use the configured strategy (possibly coverage-directed).
            ms = CreateModelStepper(libs, mp, settings.modelStepper, settings.coverage);
        }
        #endregion

        confTester = new ConformanceTester(ms, implStepper);

        #region configure conformance tester settings
        confTester.ContinueOnFailure = settings.continueOnFailure;
        // Step/run counts only apply when exploring; a test suite fixes them itself.
        confTester.StepsCnt = (testcases.IsEmpty ? settings.steps : 0);
        confTester.MaxStepsCnt = (testcases.IsEmpty ? settings.maxSteps : 0);
        confTester.RunsCnt = (testcases.IsEmpty ? settings.runs : testcases.Count);
        confTester.WaitAction = settings.waitAction;
        confTester.TimeoutAction = settings.timeoutAction;
        Symbol waitActionSymbol = confTester.waitActionSet.Choose();
        Symbol timeoutActionSymbol = confTester.timeoutAction.FunctionSymbol1;

        Set <Symbol> obs = new Set <string>(settings.observableAction).Convert <Symbol>(delegate(string s) { return(Symbol.Parse(s)); });
        confTester.ObservableActionSymbols = obs;
        Set <Symbol> cleanup = new Set <string>(settings.cleanupAction).Convert <Symbol>(delegate(string s) { return(Symbol.Parse(s)); });
        confTester.CleanupActionSymbols = cleanup;

        if (confTester.IsAsync)
        {
            //remove the wait and timeout action symbol from tester action symbols
            if (confTester.testerActionSymbols.Contains(waitActionSymbol) || confTester.testerActionSymbols.Contains(timeoutActionSymbol))
            {
                confTester.testerActionSymbols = confTester.testerActionSymbols.Remove(waitActionSymbol).Remove(timeoutActionSymbol);
            }
        }

        Set <Symbol> internals = new Set <string>(settings.internalAction).Convert <Symbol>(delegate(string s) { return(Symbol.Parse(s)); });
        // When running a test suite started by the default "Test" action, treat "Test" as internal.
        confTester.InternalActionSymbols = (testcases.IsEmpty || settings.startTestAction != "Test" ? internals : internals.Add(Symbol.Parse("Test")));

        // settings.timeout is in milliseconds; same timeout for every tester action.
        TimeSpan timeout = new TimeSpan(0, 0, 0, 0, settings.timeout);
        confTester.TesterActionTimeout = delegate(IState s, CompoundTerm a) { return(timeout); };
        confTester.Logfile = settings.logfile;
        confTester.OverwriteLog = settings.overwriteLog;
        if (settings.randomSeed != 0)
        {
            confTester.RandomSeed = settings.randomSeed;
        }
        #endregion

        //finally, run the application
        confTester.Run();
    }
    catch (ModelProgramUserException)
    {
        // Already a user-facing error; let it propagate unchanged.
        throw;
    }
    catch (ConformanceTesterException e)
    {
        throw new ModelProgramUserException(e.Message);
    }
    finally
    {
        if (confTester != null)
        {
            confTester.Dispose();
        }
    }
}
// <summary>
// Finds errors related to splitting Conditions
// 1. Condition value is repeated across multiple types
// 2. A Column/attribute is mapped but also used as a condition
// </summary>
private void MatchConditionErrors()
{
    var leftCellWrappers = m_viewgenContext.AllWrappersForExtent;

    //Stores violating Discriminator (condition member) so that we dont repeat the same error
    var mappedConditionMembers = new Set <MemberPath>();

    //Both of these data-structs help in finding duplicate conditions
    var setOfconditions = new Set <CompositeCondition>(new ConditionComparer());
    var firstLCWForCondition = new Dictionary <CompositeCondition, LeftCellWrapper>(new ConditionComparer());

    foreach (var leftCellWrapper in leftCellWrappers)
    {
        // Accumulates this fragment's composite condition: member path -> set of constant values.
        var condMembersValues = new CompositeCondition();
        var cellQuery = leftCellWrapper.OnlyInputCell.GetLeftQuery(m_viewgenContext.ViewTarget);

        foreach (var condition in cellQuery.GetConjunctsFromWhereClause())
        {
            var memberPath = condition.RestrictedMemberSlot.MemberPath;
            // Only discriminator (condition) members are of interest here.
            if (!m_domainMap.IsConditionMember(memberPath))
            {
                continue;
            }

            var scalarCond = condition as ScalarRestriction;
            //Check for mapping of Scalar member condition, ignore type conditions
            if (scalarCond != null &&
                !mappedConditionMembers.Contains(memberPath) &&                                                                              /* prevents duplicate errors */
                !leftCellWrapper.OnlyInputCell.CQuery.WhereClause.Equals(leftCellWrapper.OnlyInputCell.SQuery.WhereClause) &&                /* projection allowed when both conditions are equal */
                !IsMemberPartOfNotNullCondition(leftCellWrappers, memberPath, m_viewgenContext.ViewTarget))
            {
                //This member should not be mapped
                CheckThatConditionMemberIsNotMapped(memberPath, leftCellWrappers, mappedConditionMembers);
            }

            //If a not-null condition is specified on a nullable column,
            //check that the property it is mapped to in the fragment is non-nullable,
            //unless there is a not null condition on the property that is being mapped it self.
            //Otherwise return an error.
            if (m_viewgenContext.ViewTarget == ViewTarget.UpdateView)
            {
                if (scalarCond != null &&
                    memberPath.IsNullable && IsMemberPartOfNotNullCondition(new[] { leftCellWrapper }, memberPath, m_viewgenContext.ViewTarget))
                {
                    var rightMemberPath = GetRightMemberPath(memberPath, leftCellWrapper);
                    if (rightMemberPath != null && rightMemberPath.IsNullable &&
                        !IsMemberPartOfNotNullCondition(new[] { leftCellWrapper }, rightMemberPath, m_viewgenContext.ViewTarget))
                    {
                        m_errorLog.AddEntry(
                            new ErrorLog.Record(
                                ViewGenErrorCode.ErrorPatternConditionError,
                                Strings.Viewgen_ErrorPattern_NotNullConditionMappedToNullableMember(
                                    memberPath, rightMemberPath
                                    ),
                                leftCellWrapper.OnlyInputCell, ""));
                    }
                }
            }

            //CheckForDuplicateConditionValue
            //discover a composite condition of the form {path1=x, path2=y, ...}
            foreach (var element in condition.Domain.Values)
            {
                Set <Constant> values;
                //if not in the dict, add it
                if (!condMembersValues.TryGetValue(memberPath, out values))
                {
                    values = new Set <Constant>(Constant.EqualityComparer);
                    condMembersValues.Add(memberPath, values);
                }
                values.Add(element);
            }
        }         //foreach condition

        if (condMembersValues.Count > 0)         //it is possible that there are no condition members
        {
            //Check if the composite condition has been encountered before
            if (setOfconditions.Contains(condMembersValues))
            {
                //Extents may be Equal on right side (e.g: by some form of Refconstraint)
                if (!RightSideEqual(firstLCWForCondition[condMembersValues], leftCellWrapper))
                {
                    //error duplicate conditions
                    m_errorLog.AddEntry(
                        new ErrorLog.Record(
                            ViewGenErrorCode.ErrorPatternConditionError,
                            Strings.Viewgen_ErrorPattern_DuplicateConditionValue(
                                BuildCommaSeparatedErrorString(condMembersValues.Keys)
                                ),
                            ToIEnum(firstLCWForCondition[condMembersValues].OnlyInputCell, leftCellWrapper.OnlyInputCell), ""));
                }
            }
            else
            {
                setOfconditions.Add(condMembersValues);

                //Remember which cell the condition came from.. used for error reporting
                firstLCWForCondition.Add(condMembersValues, leftCellWrapper);
            }
        }
    }     //foreach fragment related to the Extent we are working on
}
public void AddTestEmptySet()
{
    // A fresh set reports no members.
    Assert.IsFalse(set.Contains(1));
    Assert.AreEqual(0, set.Count);

    bool wasAdded = set.Add(1);

    // Adding a new element succeeds and becomes observable.
    Assert.IsTrue(wasAdded);
    Assert.AreEqual(1, set.Count);
    Assert.IsTrue(set.Contains(1));
}
/// <summary>
/// Gets the shortest network path from the current frame element to another frame element
/// via breadth-first search. NOTE(review): the search mutates shared back-pointer fields
/// on FrameElement instances, so this is not safe for concurrent searches — confirm callers
/// serialize access.
/// </summary>
/// <param name="destinationFrameElement">Destination frame element</param>
/// <param name="searchRelations">Relations to search</param>
/// <param name="searchDirection">Relation direction to search</param>
/// <param name="maxDepth">Maximum depth to search within the network (i.e., maximum distance destination frame element can be from the current one)</param>
/// <param name="frameElementPath">Path from this frame element to the destination frame element, or null for no path</param>
/// <param name="relationPath">Relation path between this frame element and the destination frame element, or null for no path</param>
/// <returns>True if path exists, false otherwise</returns>
public bool GetShortestPathTo(FrameElement destinationFrameElement, Set <Frame.FrameRelation> searchRelations, Frame.FrameRelationDirection searchDirection,
                              int maxDepth, out List <FrameElement> frameElementPath, out List <Frame.FrameRelation> relationPath)
{
    frameElementPath = null;
    relationPath = null;

    // breadth-first search originating at the current frame element
    Queue <FrameElement> searchQueue = new Queue <FrameElement>();

    _frameElementSearchBackPointer = null;     // make sure to null out the source frame element back pointer
    searchQueue.Enqueue(this);

    Set <FrameElement> frameElementsEncountered = new Set <FrameElement>();     // keep track of frame elements we see so we don't enter any cycles
    frameElementsEncountered.Add(this);

    int currentDepth = 0;                      // tracks current search depth
    int nodesAtCurrentDepth = 1;               // tracks nodes at current search depth
    int nodesAtCurrentDepthPlusOne = 0;        // tracks nodes at one beyond the current search depth

    while (searchQueue.Count > 0 && currentDepth <= maxDepth)
    {
        FrameElement currentFrameElement = searchQueue.Dequeue();

        // check for destination frame element
        if (currentFrameElement == destinationFrameElement)
        {
            // create path by following backpointers (walks destination -> source,
            // then both lists are reversed below)
            frameElementPath = new List <FrameElement>();
            relationPath = new List <Frame.FrameRelation>();
            while (destinationFrameElement != null)
            {
                frameElementPath.Add(destinationFrameElement);

                // back up to previous frame element
                FrameElement previousFrameElement = destinationFrameElement.FrameElementSearchBackPointer;

                // if the previous frame element isn't null, record the relationship
                if (previousFrameElement != null)
                {
                    relationPath.Add(destinationFrameElement.FrameRelationSearchBackPointer);
                }

                destinationFrameElement = previousFrameElement;
            }

            // reverse paths to be from the current to the destination frame elements
            frameElementPath.Reverse();
            relationPath.Reverse();

            // sanity checks on the reconstructed path
            if (frameElementPath[0] != this)
            {
                throw new Exception("Path should start at current frame element");
            }

            if (frameElementPath.Count != relationPath.Count + 1)
            {
                throw new Exception("Path length mismatch between frame elements and relations");
            }

            if (frameElementPath.Count - 1 > maxDepth)
            {
                throw new Exception("Exceeded maximum allowed search depth");
            }

            return(true);
        }

        // queue up frame elements related to the current one by any of the given relations
        int nodesAdded = 0;
        foreach (Frame.FrameRelation searchRelation in searchRelations)
        {
            // add sub-FEs
            if (searchDirection == Frame.FrameRelationDirection.Sub || searchDirection == Frame.FrameRelationDirection.Both)
            {
                foreach (FrameElement subFE in currentFrameElement._relationSubFrameElements[searchRelation])
                {
                    if (!frameElementsEncountered.Contains(subFE))
                    {
                        // record how we reached this node before enqueueing it
                        subFE._frameElementSearchBackPointer = currentFrameElement;
                        subFE._frameRelationSearchBackPointer = searchRelation;

                        searchQueue.Enqueue(subFE);
                        frameElementsEncountered.Add(subFE);
                        ++nodesAdded;
                    }
                }
            }

            // add super-FEs
            if (searchDirection == Frame.FrameRelationDirection.Super || searchDirection == Frame.FrameRelationDirection.Both)
            {
                foreach (FrameElement superFE in currentFrameElement._relationSuperFrameElements[searchRelation])
                {
                    if (!frameElementsEncountered.Contains(superFE))
                    {
                        superFE._frameElementSearchBackPointer = currentFrameElement;
                        superFE._frameRelationSearchBackPointer = searchRelation;

                        searchQueue.Enqueue(superFE);
                        frameElementsEncountered.Add(superFE);
                        ++nodesAdded;
                    }
                }
            }
        }

        // all generated search nodes belong in the next depth level
        nodesAtCurrentDepthPlusOne += nodesAdded;

        // if there aren't any nodes left at the current depth level, move to next level out
        if (--nodesAtCurrentDepth == 0)
        {
            nodesAtCurrentDepth = nodesAtCurrentDepthPlusOne;
            nodesAtCurrentDepthPlusOne = 0;
            currentDepth++;
        }
    }

    return(false);
}