/// <summary>
/// Builds a name/value map for the currently selected row, or returns null when no
/// row is selected. Slots are seeded for every bound data field and every data-key
/// name, then filled from the selected row.
/// </summary>
public IOrderedDictionary GetSelectedRowData()
{
    if (this.SelectedRow == null)
    {
        return null;
    }

    OrderedDictionary rowValues = new OrderedDictionary();

    // Seed one slot per bound data field.
    foreach (object column in this.CreateColumns(null, false))
    {
        BoundField bound = column as BoundField;
        if (bound != null && !rowValues.Contains(bound.DataField))
        {
            rowValues.Add(bound.DataField, null);
        }
    }

    // Make sure every data-key name has a slot as well.
    foreach (string keyName in this.DataKeyNames)
    {
        if (!rowValues.Contains(keyName))
        {
            rowValues.Add(keyName, null);
        }
    }

    // Fill the slots from the selected row (read-only fields and keys included).
    this.ExtractRowValues(rowValues, this.SelectedRow, true, true);
    return rowValues;
}
/// <summary>
/// All shared queries must go here.
/// </summary>
/// <param name="queryName">Query name, optionally followed by '/'-separated arguments.</param>
/// <param name="parameters">Optional query parameters; may be null.</param>
/// <returns>The composed query.</returns>
/// <exception cref="NotImplementedException">Thrown when the query name is not supported.</exception>
public static IQueryable<User> GetQuery(string queryName, OrderedDictionary parameters)
{
    // Split the query name into its segments: name[/arg1[/arg2]].
    string[] qa = queryName.Split('/');
    string p0 = qa[0];
    string p1 = (qa.Length > 1) ? qa[1] : string.Empty;
    string p2 = (qa.Length > 2) ? qa[2] : string.Empty;

    // Set predefined query.
    IQueryable<User> query;
    var db = SecurityEntities.Current;
    UserQueryName qne = GuidEnum.TryParse<UserQueryName>(p0, UserQueryName.None, true);
    switch (qne)
    {
        case UserQueryName.All:
            query = from row in db.Users select row;
            break;
        default:
            throw new NotImplementedException(string.Format("{0} QueryName not supported", queryName));
    }

    // Add search condition.
    // BUG FIX: the original read parameters["UserId"] before this null check, which
    // could throw a NullReferenceException when parameters was null (the unused local
    // has been removed); every parameter read is now guarded by this check.
    if (parameters != null)
    {
        // Apply search filter.
        string searchValue = parameters.Contains("SearchName") ? (string)parameters["SearchName"] : string.Empty;
        if (!string.IsNullOrEmpty(searchValue))
        {
            searchValue = searchValue.Trim();
            Guid uid;
            if (Guid.TryParse(searchValue, out uid))
            {
                // A GUID-shaped search value means an exact user-id lookup.
                query = query.Where(x => x.UserId == uid);
            }
            else
            {
                // we cant use FullText index inside linq so just extend command timout in order for
                // search not to fail.
                if (db.CommandTimeout < 120)
                    db.CommandTimeout = 120;
                query = query.Where(x => x.UserName == searchValue);
            }
        }
    }
    return query;
}
/// <summary>
/// Verifies that a custom equality comparer supplied at construction time is honored
/// by Add, Remove, Contains and the indexer.
/// </summary>
public void PassingEqualityComparers()
{
    var comparer = new CaseInsensitiveEqualityComparer();
    var dictionary = new OrderedDictionary(comparer);
    dictionary.Add("foo", "bar");

    // "FOO" equals "foo" under the comparer, so adding it again must fail.
    Assert.Throws<ArgumentException>(() => dictionary.Add("FOO", "bar"));

    // The equality comparer should also test for a non-existent key.
    dictionary.Remove("foofoo");
    Assert.True(dictionary.Contains("foo"));

    // Make sure we can change an existent key that passes the equality comparer.
    dictionary["FOO"] = "barbar";
    Assert.Equal("barbar", dictionary["foo"]);

    dictionary.Remove("FOO");
    Assert.False(dictionary.Contains("foo"));
}
// Shared assertions exercising the basic OrderedDictionary contract on an empty
// instance supplied by the caller: add/update by key and by index, CopyTo,
// Keys/Values, Contains, Insert, RemoveAt, Remove, Clear, and the behavior of a
// read-only wrapper taken mid-way through the mutations.
private void Common (OrderedDictionary od)
{
    Assert.IsNotNull (od.GetEnumerator (), "GetEnumerator");
    Assert.AreEqual (0, od.Count, "Count-0");
    Assert.IsFalse (od.IsReadOnly, "IsReadOnly");
    od.Add ("a", "1");
    Assert.AreEqual (1, od.Count, "Count-1");
    // Update through the string indexer, then through the positional indexer.
    od["a"] = "11";
    Assert.AreEqual ("11", od["a"], "this[string]");
    od[0] = "111";
    Assert.AreEqual ("111", od[0], "this[int]");
    // CopyTo honors the supplied start index (slot 0 stays untouched).
    DictionaryEntry[] array = new DictionaryEntry[2];
    od.CopyTo (array, 1);
    Assert.AreEqual ("111", ((DictionaryEntry)array[1]).Value, "CopyTo");
    Assert.AreEqual (1, od.Keys.Count, "Keys");
    Assert.AreEqual (1, od.Values.Count, "Values");
    // Contains matches keys only, never values.
    Assert.IsTrue (od.Contains ("a"), "Contains(a)");
    Assert.IsFalse (od.Contains ("111"), "Contains(111)");
    od.Insert (0, "b", "2");
    Assert.AreEqual (2, od.Count, "Count-2");
    od.Add ("c", "3");
    Assert.AreEqual (3, od.Count, "Count-3");
    // Take a read-only view now, then keep mutating the original.
    OrderedDictionary ro = od.AsReadOnly ();
    od.RemoveAt (2);
    Assert.AreEqual (2, od.Count, "Count-4");
    Assert.IsFalse (od.Contains ("c"), "Contains(c)");
    od.Remove ("b");
    Assert.AreEqual (1, od.Count, "Count-5");
    Assert.IsFalse (od.Contains ("b"), "Contains(b)");
    od.Clear ();
    Assert.AreEqual (0, od.Count, "Count-6");
    Assert.IsTrue (ro.IsReadOnly, "IsReadOnly-2");
    // it's a read-only reference
    Assert.AreEqual (0, od.Count, "Count-7");
}
/// <summary>
/// Recursively registers <paramref name="resource"/> and all of its dependencies in
/// <paramref name="allResources"/>, combining require settings so that the settings
/// given for a resource cascade down into its dependencies (e.g. if Foo depends on
/// Bar and Foo's required location is Head, so too should Bar's location be).
/// </summary>
protected virtual void ExpandDependencies(ResourceDefinition resource, RequireSettings settings, OrderedDictionary allResources)
{
    if (resource == null)
    {
        return;
    }

    // Forge the effective require settings for this resource:
    // (1) if a require already exists for it, combine with it (last settings in gets
    //     preference for its specified values);
    // (2) otherwise start from a new settings object carrying this resource's own
    //     type/name and combine the given settings into it.
    RequireSettings baseSettings = allResources.Contains(resource)
        ? (RequireSettings)allResources[resource]
        : new RequireSettings { Type = resource.Type, Name = resource.Name };
    settings = baseSettings.Combine(settings);

    if (resource.Dependencies != null)
    {
        foreach (var dependencyName in resource.Dependencies)
        {
            var dependency = FindResource(new RequireSettings { Type = resource.Type, Name = dependencyName });
            if (dependency == null)
            {
                continue;
            }
            // Settings cascade down into each resolved dependency.
            ExpandDependencies(dependency, settings, allResources);
        }
    }
    allResources[resource] = settings;
}
/// <summary>
/// Executes a data-source command for the current row. Returns false when data
/// binding is not automatic or the view cannot execute the command; throws when
/// the data source returns a null view.
/// </summary>
private bool HandleCommand(string commandName)
{
    if (!IsDataBindingAutomatic)
    {
        return false;
    }

    DataSourceView view = GetData();
    if (view == null)
    {
        throw new HttpException(SR.GetString(SR.View_DataSourceReturnedNullView, ID));
    }
    if (!view.CanExecute(commandName))
    {
        return false;
    }

    OrderedDictionary values = new OrderedDictionary();
    OrderedDictionary keys = new OrderedDictionary();
    ExtractRowValues(values, true /*includeReadOnlyFields*/, false /*includePrimaryKey*/);

    // Primary-key entries travel in 'keys'; drop them from 'values' so they are not
    // sent to the data source twice.
    foreach (DictionaryEntry entry in DataKey.Values)
    {
        keys.Add(entry.Key, entry.Value);
        if (values.Contains(entry.Key))
        {
            values.Remove(entry.Key);
        }
    }

    view.ExecuteCommand(commandName, keys, values, HandleCommandCallback);
    return true;
}
// Exports every course's grade sheet for the logged-in instructor to an Excel
// spreadsheet: one worksheet per course, students down column A, assignments across
// row 1, and each grade at the matching (assignment column, student row) cell.
protected void ExportGradeSheet(object sender, EventArgs e)
{
    DataHandler db = new DataHandler();
    using (db.Connect())
    {
        db.Command("ListAllCourseAndGradesForInstructor", true);
        db.Add("@instructor_id", SessionManager.Instructor.ID);
        db.Start();
        SqlDataReader reader = db.Exec();
        SpreadSheetExport excelExport = new SpreadSheetExport();
        excelExport.CreateSpreadSheet();

        // These values will keep track of the utilized Assignments and Students.
        string prevCourse = "";
        string prevAssignment = "";
        string prevStudent = "";

        // Will use these values to keep track of where values are placed.
        string student_column = "A"; // This shouldn't change
        uint student_row_value = 2;
        string assign_column = "B";
        uint assgn_row_value = 1; // This shouldn't change.
        // Only student_row_value and assign_column are incremented, when new fields are added.
        //----------------------------------------------------------------
        OrderedDictionary trackedReferences = new OrderedDictionary();
        // Strategy: to keep track of which Student and Assignment is what and where to
        // place the associated grade, each Student and Assignment is stored in this
        // associative array. When a Student or Assignment is first seen, its row or
        // column value is stored; on later encounters that stored value is used to put
        // the grade in its rightful place.
        // NOTE(review): trackedReferences is never cleared when a new course worksheet
        // starts — confirm that carrying placements across sheets is intended.
        if (reader.HasRows)
        {
            while (reader.Read())
            {
                string currentCourse = reader["course_name"].ToString();
                string currentStudent = reader["student_name"].ToString();
                string currentAssignment = reader["assignment_name"].ToString();
                string currentAssignmentGrade = reader["assignment_grade"].ToString();

                // This section determines whether we have a new Course to add or not.
                if (!prevCourse.Equals(currentCourse) && !prevCourse.Equals("")) // We found a new Course name and it's not the first sheet
                {
                    excelExport.AddWorkSheet(currentCourse);
                    student_row_value = 2;
                    assign_column = "B"; // Reset these values back to their defaults since we're going to be using a new sheet
                }
                else if (prevCourse.Equals("")) // This means our first runthrough. We're going to need the name of this course now
                {
                    prevCourse = currentCourse;
                    excelExport.AddWorkSheet(currentCourse);
                }

                // Both Student and Assignment already placed: write the grade at the
                // stored column/row intersection.
                //if (!prevAssignment.Equals(reader["assignment_name"].ToString()) &&
                //    (trackedReferences.Contains(reader["assignment_name"].ToString()) && trackedReferences.Contains(reader["student_name"].ToString())))
                if (trackedReferences.Contains(currentStudent) && trackedReferences.Contains(currentAssignment))
                {
                    excelExport.InsertTextInCell(currentAssignmentGrade, trackedReferences[currentAssignment].ToString(), (uint)trackedReferences[currentStudent], currentCourse);
                }
                // New assignment (not added before) for an existing student: add the
                // assignment header, record its column, then place the grade.
                else if ((!prevAssignment.Equals(currentAssignment) && !trackedReferences.Contains(currentAssignment)) && trackedReferences.Contains(currentStudent))
                {
                    excelExport.InsertTextInCell(currentAssignment, assign_column, assgn_row_value, currentCourse);
                    trackedReferences.Add(currentAssignment, assign_column);
                    excelExport.InsertTextInCell(currentAssignmentGrade, assign_column, (uint)trackedReferences[currentStudent], currentCourse);
                    assign_column = excelExport.IncrementColRef(assign_column);
                }
                // New student found for an existing assignment: add the student row,
                // record it, then place the grade.
                else if ((!prevStudent.Equals(currentStudent) && !trackedReferences.Contains(currentStudent)) && trackedReferences.Contains(currentAssignment))
                {
                    excelExport.InsertTextInCell(currentStudent, student_column, student_row_value, currentCourse);
                    trackedReferences.Add(currentStudent, student_row_value);
                    excelExport.InsertTextInCell(currentAssignmentGrade, trackedReferences[currentAssignment].ToString(), student_row_value, currentCourse);
                    student_row_value++;
                }
                else if((!trackedReferences.Contains(currentStudent) && !trackedReferences.Contains(currentAssignment)) || (prevAssignment.Equals("") && prevStudent.Equals(""))) // This appears to be a new student and assignment. Add it and place in dictionary
                {
                    excelExport.InsertTextInCell(currentStudent, student_column, student_row_value, currentCourse);
                    trackedReferences.Add(currentStudent, student_row_value);
                    excelExport.InsertTextInCell(currentAssignment, assign_column, assgn_row_value, currentCourse);
                    trackedReferences.Add(currentAssignment, assign_column);
                    excelExport.InsertTextInCell(currentAssignmentGrade, assign_column, student_row_value, currentCourse);
                    student_row_value++;
                    assign_column = excelExport.IncrementColRef(assign_column);
                }

                // Capture the previous values here because on the next iteration the
                // real current values will be captured.
                prevCourse = currentCourse;
                prevAssignment = currentAssignment;
                prevStudent = currentStudent;
            }
        }
        excelExport.ExcelToResponse();
        db.Stop();
    }
}
/// <summary>
/// Runs a data-source command for the current row. Returns false when the control
/// is not bound through a DataSourceID or when the view rejects the command;
/// throws when the data source hands back a null view.
/// </summary>
private bool HandleCommand(string commandName)
{
    if (!base.IsBoundUsingDataSourceID)
    {
        return false;
    }

    DataSourceView data = this.GetData();
    if (data == null)
    {
        throw new HttpException(System.Web.SR.GetString("View_DataSourceReturnedNullView", new object[] { this.ID }));
    }
    if (!data.CanExecute(commandName))
    {
        return false;
    }

    OrderedDictionary fieldValues = new OrderedDictionary();
    OrderedDictionary keys = new OrderedDictionary();
    this.ExtractRowValues(fieldValues, true, false);

    // Key fields travel separately in 'keys'; drop them from 'fieldValues'.
    foreach (DictionaryEntry entry in this.DataKey.Values)
    {
        keys.Add(entry.Key, entry.Value);
        if (fieldValues.Contains(entry.Key))
        {
            fieldValues.Remove(entry.Key);
        }
    }

    data.ExecuteCommand(commandName, keys, fieldValues, new DataSourceViewOperationCallback(this.HandleCommandCallback));
    return true;
}
/// <summary>
/// Executes a data-source command for the given list-view item. Returns false when
/// binding is not automatic or the view cannot run the command; throws when the
/// view is null or the command target cannot be identified.
/// </summary>
private bool HandleCommand(ListViewItem item, int itemIndex, string commandName)
{
    if (!IsDataBindingAutomatic)
    {
        return false;
    }

    DataSourceView view = GetData();
    if (view == null)
    {
        throw new InvalidOperationException(String.Format(CultureInfo.CurrentCulture, AtlasWeb.ListView_NullView, ID));
    }
    if (!view.CanExecute(commandName))
    {
        return false;
    }

    // Without a valid index there must at least be a data item to act on.
    ListViewDataItem dataItem = item as ListViewDataItem;
    if (itemIndex < 0 && dataItem == null)
    {
        throw new InvalidOperationException(AtlasWeb.ListView_InvalidCommand);
    }

    OrderedDictionary values = new OrderedDictionary();
    OrderedDictionary keys = new OrderedDictionary();
    if (item != null)
    {
        ExtractItemValues(values, item, false /*includePrimaryKey*/);
    }

    // Move the item's key fields into 'keys' so they are not duplicated in 'values'.
    if (DataKeys.Count > itemIndex)
    {
        foreach (DictionaryEntry entry in DataKeys[itemIndex].Values)
        {
            keys.Add(entry.Key, entry.Value);
            if (values.Contains(entry.Key))
            {
                values.Remove(entry.Key);
            }
        }
    }

    view.ExecuteCommand(commandName, keys, values, HandleCommandCallback);
    return true;
}
/// <summary>
/// Exercises OrderedDictionary.CopyTo: argument validation plus copying into an
/// array at offsets 0 and 1.
/// </summary>
public void CopyToTests()
{
    var dictionary = new OrderedDictionary();
    dictionary["foo"] = "bar";
    dictionary[" "] = "asd";

    DictionaryEntry[] entries = new DictionaryEntry[3];

    // Invalid arguments must be rejected up front.
    Assert.Throws<ArgumentNullException>(() => dictionary.CopyTo(null, 0));
    Assert.Throws<ArgumentOutOfRangeException>(() => dictionary.CopyTo(entries, -1));
    Assert.Throws<ArgumentException>(() => dictionary.CopyTo(entries, 3));

    // Copy starting at slot 0: both entries land in [0..1] and are distinct.
    dictionary.CopyTo(entries, 0);
    for (int i = 0; i < 2; i++)
    {
        Assert.True(dictionary.Contains(entries[i].Key));
        Assert.Equal(dictionary[entries[i].Key], entries[i].Value);
    }
    Assert.NotEqual(entries[0].Key, entries[1].Key);

    // Copy starting at slot 1: both entries land in [1..2] and are distinct.
    dictionary.CopyTo(entries, 1);
    for (int i = 1; i < 3; i++)
    {
        Assert.True(dictionary.Contains(entries[i].Key));
        Assert.Equal(dictionary[entries[i].Key], entries[i].Value);
    }
    Assert.NotEqual(entries[1].Key, entries[2].Key);
}
/// <summary>
/// Evaluates every parameter in the collection and returns its name/value pairs.
/// Duplicate parameter names are made unique by appending a numeric suffix
/// ("p", "p1", "p2", ...).
/// </summary>
public IOrderedDictionary GetValues (HttpContext context, Control control)
{
    OrderedDictionary result = new OrderedDictionary ();
    foreach (Parameter param in this) {
        // Disambiguate repeated names before adding.
        string key = param.Name;
        int suffix = 0;
        while (result.Contains (key)) {
            suffix++;
            key = param.Name + suffix.ToString ();
        }
        result.Add (key, param.GetValue (context, control));
    }
    return result;
}
// Merges new and old values with the declared view parameters into a single ordered
// dictionary. Parameter-declared entries are converted to the parameter's type; old
// values are stored under FormatOldParameter(name). Entries in values/oldValues with
// no matching declared parameter are appended unchanged at the end.
IOrderedDictionary MergeParameterValues (ParameterCollection viewParams, IDictionary values, IDictionary oldValues, bool allwaysAddNewValues)
{
    OrderedDictionary merged = new OrderedDictionary ();

    foreach (Parameter p in viewParams) {
        bool haveOld = oldValues != null && oldValues.Contains (p.Name);
        if (haveOld) {
            // The old value, converted to the parameter type, goes in under the "old" key.
            merged [FormatOldParameter (p.Name)] = Convert.ChangeType (oldValues [p.Name], p.Type);
        }
        if (values != null && values.Contains (p.Name)) {
            merged [p.Name] = Convert.ChangeType (values [p.Name], p.Type);
        } else if (!haveOld || allwaysAddNewValues) {
            // No explicit new value: fall back to the parameter's own value.
            merged [p.Name] = p.GetValue (context, owner);
        }
    }

    // Append any new values that no declared parameter claimed.
    if (values != null) {
        foreach (DictionaryEntry de in values) {
            if (!merged.Contains (de.Key))
                merged [de.Key] = de.Value;
        }
    }

    // Likewise append unclaimed old values, under their "old" keys.
    if (oldValues != null) {
        foreach (DictionaryEntry de in oldValues) {
            if (!merged.Contains (FormatOldParameter ((string) de.Key)))
                merged [FormatOldParameter ((string) de.Key)] = de.Value;
        }
    }

    return merged;
}
/// <summary>
/// Refreshes all parameters for the given context/control and returns a dictionary
/// of their current values, uniquifying duplicate names with an invariant-culture
/// numeric suffix.
/// </summary>
public IOrderedDictionary GetValues(HttpContext context, Control control)
{
    this.UpdateValues(context, control);
    IOrderedDictionary result = new OrderedDictionary();
    foreach (Parameter parameter in this)
    {
        // "p", "p1", "p2", ... until the name is free.
        string uniqueName = parameter.Name;
        int suffix = 0;
        while (result.Contains(uniqueName))
        {
            suffix++;
            uniqueName = parameter.Name + suffix.ToString(CultureInfo.InvariantCulture);
        }
        result.Add(uniqueName, parameter.ParameterValue);
    }
    return result;
}
// Plays the due/valid messages in the given queue. Immediate messages are played
// while holding the queue lock; regular queued messages release the lock so new
// messages can be queued while these are being read. Played or rejected messages
// are removed from the queue at the end, and the method recurses while due
// non-immediate messages remain.
private void playQueueContents(OrderedDictionary queueToPlay, Boolean isImmediateMessages)
{
    long milliseconds = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;
    List<String> keysToPlay = new List<String>();
    List<String> soundsProcessed = new List<String>();
    Boolean oneOrMoreEventsEnabled = false;
    if (queueToPlay.Count > 0)
    {
        Console.WriteLine("Processing queue of " + queueToPlay.Count + " event(s)");
    }
    lock (queueToPlay)
    {
        // First pass: decide which queued messages are due and still playable.
        foreach (String key in queueToPlay.Keys)
        {
            QueuedMessage queuedMessage = (QueuedMessage)queueToPlay[key];
            if (isImmediateMessages || queuedMessage.dueTime <= milliseconds)
            {
                // A clip is playable when we're not keeping quiet (immediate messages
                // ignore that flag), its source event still considers it valid, it is
                // not already selected, gap-filler rules allow it, and it has not
                // expired.
                if ((isImmediateMessages || !keepQuiet) &&
                    (queuedMessage.abstractEvent == null || queuedMessage.abstractEvent.isClipStillValid(key)) &&
                    !keysToPlay.Contains(key) &&
                    (!queuedMessage.gapFiller || playGapFillerMessage(queueToPlay)) &&
                    (queuedMessage.expiryTime == 0 || queuedMessage.expiryTime > milliseconds))
                {
                    keysToPlay.Add(key);
                }
                else
                {
                    // Rejected clips are marked processed so they get purged below.
                    Console.WriteLine("Clip " + key + " is not valid");
                    soundsProcessed.Add(key);
                }
            }
        }
        if (keysToPlay.Count > 0)
        {
            if (keysToPlay.Count == 1 && clipIsPearlOfWisdom(keysToPlay[0]) && hasPearlJustBeenPlayed())
            {
                // Rate-limit pearls of wisdom.
                Console.WriteLine("Rejecting pearl of wisdom " + keysToPlay[0] +
                    " because one has been played in the last " + minTimeBetweenPearlsOfWisdom + " seconds");
                soundsProcessed.Add(keysToPlay[0]);
            }
            else
            {
                // Only open the channel if at least one selected clip is an enabled
                // sound (compound messages count when they are valid).
                foreach (String eventName in keysToPlay)
                {
                    if ((eventName.StartsWith(QueuedMessage.compoundMessageIdentifier) &&
                        ((QueuedMessage)queueToPlay[eventName]).isValid) || enabledSounds.Contains(eventName))
                    {
                        oneOrMoreEventsEnabled = true;
                    }
                }
            }
        }
        if (queueToPlay.Count > 0 && keysToPlay.Count == 0)
        {
            Console.WriteLine("None of the " + queueToPlay.Count + " message(s) in this queue is due or valid");
        }
    }
    Boolean wasInterrupted = false;
    if (oneOrMoreEventsEnabled)
    {
        // block for immediate messages...
        if (isImmediateMessages)
        {
            lock (queueToPlay)
            {
                openRadioChannelInternal();
                soundsProcessed.AddRange(playSounds(keysToPlay, isImmediateMessages, out wasInterrupted));
            }
        }
        else
        {
            // for queued messages, allow other messages to be inserted into the queue while these are being read
            openRadioChannelInternal();
            soundsProcessed.AddRange(playSounds(keysToPlay, isImmediateMessages, out wasInterrupted));
        }
    }
    else
    {
        soundsProcessed.AddRange(keysToPlay);
    }
    if (soundsProcessed.Count > 0)
    {
        // Purge everything that was played or rejected from the queue.
        lock (queueToPlay)
        {
            foreach (String key in soundsProcessed)
            {
                if (queueToPlay.Contains(key))
                {
                    queueToPlay.Remove(key);
                }
            }
        }
    }
    // Keep draining the queue while more non-immediate messages are due.
    if (queueHasDueMessages(queueToPlay, isImmediateMessages) && !wasInterrupted && !isImmediateMessages)
    {
        Console.WriteLine("There are " + queueToPlay.Count + " more events in the queue, playing them...");
        playQueueContents(queueToPlay, isImmediateMessages);
    }
}
/// <summary>
/// Builds an OrderedDictionary whose keys are the distinct keywords in first-seen
/// order; every value is null.
/// </summary>
private OrderedDictionary ToDictionary(IEnumerable<string> keywords)
{
    var result = new OrderedDictionary();
    foreach (var keyword in keywords)
    {
        // Skip duplicates so each keyword appears once, at its first position.
        if (!result.Contains(keyword))
        {
            result.Add(keyword, null);
        }
    }
    return result;
}
// Returns the (cached) attribute collection for _type: the type's own attributes,
// then inherited base-class attributes, then attributes from its interfaces,
// de-duplicated by TypeId with the first-seen (most derived) entry winning.
internal AttributeCollection GetAttributes()
{
    if (this._attributes == null)
    {
        // Start with the type's own attributes, then append each base type's
        // attributes (stopping before object) so more-derived entries come first.
        Attribute[] sourceArray = ReflectTypeDescriptionProvider.ReflectGetAttributes(this._type);
        for (Type type = this._type.BaseType; (type != null) && (type != typeof(object)); type = type.BaseType)
        {
            Attribute[] attributeArray2 = ReflectTypeDescriptionProvider.ReflectGetAttributes(type);
            Attribute[] destinationArray = new Attribute[sourceArray.Length + attributeArray2.Length];
            Array.Copy(sourceArray, 0, destinationArray, 0, sourceArray.Length);
            Array.Copy(attributeArray2, 0, destinationArray, sourceArray.Length, attributeArray2.Length);
            sourceArray = destinationArray;
        }
        // Remember where the class-hierarchy attributes end; entries past this index
        // came from interfaces and are subject to the skip list below.
        int length = sourceArray.Length;
        foreach (Type type2 in this._type.GetInterfaces())
        {
            // NOTE(review): this mask comparison appears to filter interfaces by
            // visibility — confirm the intended semantics against the reference
            // TypeDescriptor sources.
            if ((type2.Attributes & TypeAttributes.NestedPrivate) != TypeAttributes.AnsiClass)
            {
                AttributeCollection attributes = TypeDescriptor.GetAttributes(type2);
                if (attributes.Count > 0)
                {
                    Attribute[] attributeArray4 = new Attribute[sourceArray.Length + attributes.Count];
                    Array.Copy(sourceArray, 0, attributeArray4, 0, sourceArray.Length);
                    attributes.CopyTo(attributeArray4, sourceArray.Length);
                    sourceArray = attributeArray4;
                }
            }
        }
        // De-duplicate by TypeId: the Contains guard means the first occurrence of
        // each TypeId wins, and OrderedDictionary preserves insertion order.
        OrderedDictionary dictionary = new OrderedDictionary(sourceArray.Length);
        for (int i = 0; i < sourceArray.Length; i++)
        {
            bool flag = true;
            if (i >= length)
            {
                // Interface-sourced attribute: honor the skip list.
                for (int j = 0; j < ReflectTypeDescriptionProvider._skipInterfaceAttributeList.Length; j++)
                {
                    if (ReflectTypeDescriptionProvider._skipInterfaceAttributeList[j].IsInstanceOfType(sourceArray[i]))
                    {
                        flag = false;
                        break;
                    }
                }
            }
            if (flag && !dictionary.Contains(sourceArray[i].TypeId))
            {
                dictionary[sourceArray[i].TypeId] = sourceArray[i];
            }
        }
        sourceArray = new Attribute[dictionary.Count];
        dictionary.Values.CopyTo(sourceArray, 0);
        this._attributes = new AttributeCollection(sourceArray);
    }
    return this._attributes;
}
/// <summary>
/// Walks the configured field layout, locates each field's control inside the inner
/// control tree, and extracts its configured "pull"/"push" members into an ordered
/// name/value dictionary.
/// </summary>
/// <exception cref="InvalidOperationException">When InnerControl or FieldsSection is unset.</exception>
public IOrderedDictionary ExtractValues(Control container)
{
    if (this.innerControl == null)
    {
        throw new InvalidOperationException("InnerControl not set");
    }
    if (this.fieldsSection == null)
    {
        throw new InvalidOperationException("FieldsSection not set");
    }

    OrderedDictionary values = new OrderedDictionary();
    Dictionary<string, Control> controlsById = new Dictionary<string, Control>();
    this.BuildControlsMap(controlsById, this.innerControl);

    int rowIndex = 0;
    foreach (IConfigurationElement rowElement in this.fieldsSection.Elements.Values)
    {
        int cellIndex = 0;
        foreach (IConfigurationElement fieldElement in rowElement.Elements.Values)
        {
            // Field controls are registered as "tr{row}tc{cell}{configKey}".
            string controlId = string.Concat(new object[] { "tr", rowIndex, "tc", cellIndex, fieldElement.ConfigKey });
            Control fieldControl;
            if (controlsById.TryGetValue(controlId, out fieldControl))
            {
                foreach (IConfigurationElement propertyElement in fieldElement.Elements.Values)
                {
                    // "pull"/"push" name the dictionary key; "member" names the
                    // control property whose value is extracted. First writer wins.
                    if (propertyElement.Attributes.ContainsKey("pull") && propertyElement.Attributes.ContainsKey("member"))
                    {
                        string pull = propertyElement.GetAttributeReference("pull").Value.ToString();
                        string member = propertyElement.GetAttributeReference("member").Value.ToString();
                        if (!values.Contains(pull))
                        {
                            values.Add(pull, ReflectionServices.ExtractValue(fieldControl, member));
                        }
                    }
                    if (propertyElement.Attributes.ContainsKey("push") && propertyElement.Attributes.ContainsKey("member"))
                    {
                        string push = propertyElement.GetAttributeReference("push").Value.ToString();
                        string member = propertyElement.GetAttributeReference("member").Value.ToString();
                        if (!values.Contains(push))
                        {
                            values.Add(push, ReflectionServices.ExtractValue(fieldControl, member));
                        }
                    }
                }
            }
            cellIndex++;
        }
        rowIndex++;
    }
    return values;
}
/// <summary>
/// Fills a dictionary with 1000 distinct keys and verifies that the Keys collection
/// reports them all exactly once, both via enumeration and via CopyTo at an offset,
/// and that CopyTo validates its arguments.
/// </summary>
public void KeysPropertyContainsAllKeys()
{
    var dictionary = new OrderedDictionary();
    var seen = new bool[1000];
    for (int i = 0; i < 1000; i++)
    {
        dictionary["test_" + i] = i;
        seen[i] = false;
    }

    ICollection keys = dictionary.Keys;
    Assert.False(keys.IsSynchronized);
    Assert.NotEqual(dictionary, keys.SyncRoot);
    Assert.Equal(dictionary.Count, keys.Count);

    // Each enumerated key must be well-formed and unique.
    foreach (var key in dictionary.Keys)
    {
        string textKey = (string)key;
        var parts = textKey.Split(new char[] { '_' });
        Assert.Equal(2, parts.Length);
        int number = int.Parse(parts[1]);
        Assert.False(seen[number]);
        Assert.True(number >= 0 && number < 1000);
        seen[number] = true;
    }

    // CopyTo with an offset leaves the leading slots alone and copies every key.
    object[] copied = new object[keys.Count + 50];
    keys.CopyTo(copied, 50);
    for (int i = 50; i < copied.Length; i++)
    {
        Assert.True(dictionary.Contains(copied[i]));
    }

    Assert.Throws<ArgumentNullException>("array", () => keys.CopyTo(null, 0));
    Assert.Throws<ArgumentOutOfRangeException>("index", () => keys.CopyTo(new object[keys.Count], -1));
}
/// <devdoc>
/// Returns an IDictionary containing Name / Value pairs of all the parameters.
/// </devdoc>
public IOrderedDictionary GetValues(HttpContext context, Control control)
{
    UpdateValues(context, control);

    // Create dictionary
    IOrderedDictionary valueDictionary = new OrderedDictionary();

    // Every key in the OrderedDictionary must be unique, so clashing parameter names
    // get an invariant-culture numeric suffix: "p", "p1", "p2", ...
    foreach (Parameter param in this)
    {
        string uniqueName = param.Name;
        for (int suffix = 1; valueDictionary.Contains(uniqueName); suffix++)
        {
            uniqueName = param.Name + suffix.ToString(CultureInfo.InvariantCulture);
        }
        valueDictionary.Add(uniqueName, param.ParameterValue);
    }
    return valueDictionary;
}
// Exercises DetailsView.ExtractRowValues through the poker subclass for the four
// combinations of includeReadOnlyFields/includePrimaryKey: the read-only flag
// drives whether anything is extracted at all, and enumeration must follow the
// bound column order (ID, FName, LName).
public void DetailsView_ExtractRowValues ()
{
    PokerDetailsView dv = new PokerDetailsView ();
    DataTable ds = TableObject.CreateDataTable ();
    dv.DataSource = ds;
    dv.DataBind ();
    OrderedDictionary fieldsValues = new OrderedDictionary ();
    // includeReadOnlyFields=true, includePrimaryKey=true: all three columns come back.
    dv.DoExtractRowValues (fieldsValues, true, true);
    Assert.AreEqual (3, fieldsValues.Count, "ExtractRowValues1");
    Assert.AreEqual (3, fieldsValues.Keys.Count, "ExtractRowValues2");
    Assert.AreEqual (3, fieldsValues.Values.Count, "ExtractRowValues3");
    Assert.AreEqual (true, fieldsValues.Contains ("ID"), "ExtractRowValues4");
    // Entries must enumerate in column order with the bound row's data.
    IDictionaryEnumerator enumerator=fieldsValues.GetEnumerator ();
    enumerator.MoveNext ();
    Assert.AreEqual ("ID",enumerator.Key,"FieldValue1");
    Assert.AreEqual ("1001", enumerator.Value , "FieldValue2");
    enumerator.MoveNext ();
    Assert.AreEqual ("FName", enumerator.Key, "FieldValue3");
    Assert.AreEqual ("Mahesh", enumerator.Value, "FieldValue4");
    enumerator.MoveNext ();
    Assert.AreEqual ("LName", enumerator.Key, "FieldValue5");
    Assert.AreEqual ("Chand", enumerator.Value, "FieldValue6");
    // includeReadOnlyFields=false: nothing is extracted.
    fieldsValues = new OrderedDictionary ();
    dv.DoExtractRowValues (fieldsValues, false, false);
    Assert.AreEqual (0, fieldsValues.Count, "ExtractRowValues-NotReadOnly1");
    // includeReadOnlyFields=true, includePrimaryKey=false: still all three columns —
    // presumably the bound table declares no key columns to exclude; confirm against
    // TableObject.CreateDataTable.
    fieldsValues = new OrderedDictionary ();
    dv.DoExtractRowValues (fieldsValues, true, false);
    Assert.AreEqual (3, fieldsValues.Count, "ExtractRowValues-NoPrimaryKeys1");
    Assert.AreEqual (3, fieldsValues.Keys.Count, "ExtractRowValues-NoPrimaryKeys2");
    // includeReadOnlyFields=false, includePrimaryKey=true: again nothing.
    fieldsValues = new OrderedDictionary ();
    dv.DoExtractRowValues (fieldsValues, false, true);
    Assert.AreEqual (0, fieldsValues.Count, "ExtractRowValues-NotReadOnly2");
    Assert.AreEqual (0, fieldsValues.Keys.Count, "ExtractRowValues-NotReadOnly3");
}
/// <summary>
/// Verifies Contains: null keys throw, absent keys miss, and every added key hits —
/// including lookups through a freshly built (different reference) string.
/// </summary>
public void ContainsTests()
{
    var dictionary = new OrderedDictionary();
    Assert.Throws<ArgumentNullException>(() => dictionary.Contains(null));
    Assert.False(dictionary.Contains("foo"));

    for (int i = 0; i < 1000; i++)
    {
        string key = "test_" + i;
        dictionary.Add(key, "asd");
        Assert.True(dictionary.Contains(key));
        // different reference
        Assert.True(dictionary.Contains("test_" + i));
    }

    Assert.False(dictionary.Contains("foo"));
}
/// <summary>
/// Now that the portData has been set for the new ports, we recreate the connections we
/// so mercilessly destroyed, restoring peace and balance to the world once again.
/// </summary>
/// <param name="inportConnections">Map of old input variable name -> connected start ports (or null once consumed).</param>
/// <param name="outportConnections">Map of old output variable name -> connected end ports (or null once consumed).</param>
private void LoadAndCreateConnectors(OrderedDictionary inportConnections, OrderedDictionary outportConnections)
{
    //----------------------------Inputs---------------------------------
    // Input port connections are matched only if the name is the same.
    for (int i = 0; i < InPortData.Count; i++)
    {
        string varName = InPortData[i].ToolTipString;
        if (inportConnections.Contains(varName) && inportConnections[varName] != null)
        {
            foreach (var startPortModel in (inportConnections[varName] as List<PortModel>))
            {
                PortType p;
                NodeModel startNode = startPortModel.Owner;
                ConnectorModel connector = ConnectorModel.Make(startNode, this,
                    startNode.GetPortIndexAndType(startPortModel, out p), i, PortType.INPUT);
                this.WorkSpace.Connectors.Add(connector);
                this.WorkSpace.UndoRecorder.RecordCreationForUndo(connector);
            }
            // BUG FIX: the original cleared outportConnections[varName] here, which
            // both polluted the output map with an input key and left this input
            // connection marked as unconsumed; the consumed entry is the INPUT one.
            inportConnections[varName] = null;
        }
    }

    //----------------------------Outputs--------------------------------
    // The matching is done in three steps.
    //
    // Step 1: match connectors against the defined variable names — first check
    // whether any of the old variable names are still present and, if they had
    // connectors, recreate them. While iterating through the new ports, also collect
    // the indices of ports that did not exist before.
    List<int> undefinedIndices = new List<int>();
    for (int i = 0; i < OutPortData.Count; i++)
    {
        string varName = OutPortData[i].ToolTipString;
        if (outportConnections.Contains(varName))
        {
            if (outportConnections[varName] != null)
            {
                foreach (var endPortModel in (outportConnections[varName] as List<PortModel>))
                {
                    PortType p;
                    NodeModel endNode = endPortModel.Owner;
                    ConnectorModel connector = ConnectorModel.Make(this, endNode, i,
                        endNode.GetPortIndexAndType(endPortModel, out p), PortType.INPUT);
                    this.WorkSpace.Connectors.Add(connector);
                    this.WorkSpace.UndoRecorder.RecordCreationForUndo(connector);
                }
                outportConnections[varName] = null;
            }
        }
        else
            undefinedIndices.Add(i);
    }

    // Step 2: for ports not matched by name, try to match connections to the previous
    // port indices: if the old third port had two connections, those go to the new
    // third port (when that port is not a variable that was defined before).
    for (int i = 0; i < undefinedIndices.Count; i++)
    {
        int index = undefinedIndices[i];
        if (index < outportConnections.Count && outportConnections[index] != null)
        {
            foreach (PortModel endPortModel in (outportConnections[index] as List<PortModel>))
            {
                PortType p;
                NodeModel endNode = endPortModel.Owner;
                ConnectorModel connector = ConnectorModel.Make(this, endNode, index,
                    endNode.GetPortIndexAndType(endPortModel, out p), PortType.INPUT);
                WorkSpace.Connectors.Add(connector);
                WorkSpace.UndoRecorder.RecordCreationForUndo(connector);
            }
            outportConnections[index] = null;
            // Compensate the loop index for the removal so no entry is skipped.
            undefinedIndices.Remove(index);
            i--;
        }
    }

    // Step 3: reuse any remaining (unconsumed) connections by attaching them, in
    // order, to ports that have not already been given connections.
    List<List<PortModel>> unusedConnections = new List<List<PortModel>>();
    foreach (List<PortModel> portModelList in outportConnections.Values.Cast<List<PortModel>>())
    {
        if (portModelList == null)
            continue;
        unusedConnections.Add(portModelList);
    }
    while (undefinedIndices.Count > 0 && unusedConnections.Count != 0)
    {
        foreach (PortModel endPortModel in unusedConnections[0])
        {
            PortType p;
            NodeModel endNode = endPortModel.Owner;
            ConnectorModel connector = ConnectorModel.Make(
                this,
                endNode,
                undefinedIndices[0],
                endNode.GetPortIndexAndType(endPortModel, out p),
                PortType.INPUT);
            WorkSpace.Connectors.Add(connector);
            WorkSpace.UndoRecorder.RecordCreationForUndo(connector);
        }
        undefinedIndices.RemoveAt(0);
        unusedConnections.RemoveAt(0);
    }
}
/// <summary>
/// Verifies Remove: absent keys are silently ignored, null keys throw, and removing
/// each of 1000 entries shrinks the dictionary one at a time.
/// </summary>
public void RemoveTests()
{
    var dictionary = new OrderedDictionary();

    // should work: removing a key that was never added is a no-op.
    dictionary.Remove("asd");
    Assert.Throws<ArgumentNullException>(() => dictionary.Remove(null));

    for (int i = 0; i < 1000; i++)
    {
        dictionary.Add("foo_" + i, "bar_" + i);
    }
    for (int i = 0; i < 1000; i++)
    {
        Assert.True(dictionary.Contains("foo_" + i));
        dictionary.Remove("foo_" + i);
        Assert.False(dictionary.Contains("foo_" + i));
        Assert.Equal(1000 - i - 1, dictionary.Count);
    }
}
// Plays the due, valid messages in the given queue, honoring per-message expiry,
// validity checks against the current game state, queue-length limits and the
// pearl-of-wisdom rules. Immediate messages play while holding the queue lock;
// queued messages release it so new messages can arrive mid-playback. Everything
// played or rejected is purged at the end, and the method recurses while due
// non-immediate messages remain.
private void playQueueContents(OrderedDictionary queueToPlay, Boolean isImmediateMessages)
{
    long milliseconds = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;
    List<String> keysToPlay = new List<String>();
    List<String> soundsProcessed = new List<String>();
    Boolean oneOrMoreEventsEnabled = false;
    lock (queueToPlay)
    {
        // Predicted queue length once rejected messages are dropped; used by the
        // per-message maxPermittedQueueLengthForMessage check below.
        int willBePlayedCount = queueToPlay.Count;
        foreach (String key in queueToPlay.Keys)
        {
            QueuedMessage queuedMessage = (QueuedMessage)queueToPlay[key];
            if (isImmediateMessages || queuedMessage.dueTime <= milliseconds)
            {
                Boolean messageHasExpired = queuedMessage.expiryTime != 0 && queuedMessage.expiryTime < milliseconds;
                Boolean messageIsStillValid = queuedMessage.isMessageStillValid(key, crewChief.currentGameState);
                Boolean queueTooLongForMessage = queuedMessage.maxPermittedQueueLengthForMessage != 0 &&
                    willBePlayedCount > queuedMessage.maxPermittedQueueLengthForMessage;
                // Playable when not silenced (immediate or playEvenWhenSilenced
                // override keepQuiet), all sound files exist, still valid, not
                // already selected, queue not too long, and not expired.
                if ((isImmediateMessages || !keepQuiet || queuedMessage.playEvenWhenSilenced) &&
                    queuedMessage.canBePlayed && messageIsStillValid && !keysToPlay.Contains(key) &&
                    !queueTooLongForMessage && !messageHasExpired)
                {
                    keysToPlay.Add(key);
                }
                else
                {
                    // Log why the clip was rejected, then mark it processed so it is
                    // purged from the queue below.
                    if (!messageIsStillValid)
                    {
                        Console.WriteLine("Clip " + key + " is not valid");
                    }
                    else if (messageHasExpired)
                    {
                        Console.WriteLine("Clip " + key + " has expired");
                    }
                    else if (queueTooLongForMessage)
                    {
                        List<String> keysToDisplay = new List<string>();
                        foreach (String keyToDisplay in queueToPlay.Keys)
                        {
                            keysToDisplay.Add(keyToDisplay);
                        }
                        Console.WriteLine("Queue is too long to play clip " + key + " max permitted items for this message = " +
                            queuedMessage.maxPermittedQueueLengthForMessage + " queue: " + String.Join(", ", keysToDisplay));
                    }
                    else if (!queuedMessage.canBePlayed)
                    {
                        Console.WriteLine("Clip " + key + " has some missing sound files");
                    }
                    soundsProcessed.Add(key);
                    willBePlayedCount--;
                }
            }
        }
        if (keysToPlay.Count > 0)
        {
            if (keysToPlay.Count == 1 && clipIsPearlOfWisdom(keysToPlay[0]))
            {
                // NOTE(review): a lone pearl that passes BOTH checks below leaves
                // oneOrMoreEventsEnabled false and is purged unplayed via the
                // soundsProcessed.AddRange(keysToPlay) branch — confirm intended.
                if (hasPearlJustBeenPlayed())
                {
                    Console.WriteLine("Rejecting pearl of wisdom " + keysToPlay[0] +
                        " because one has been played in the last " + minTimeBetweenPearlsOfWisdom + " seconds");
                    soundsProcessed.Add(keysToPlay[0]);
                }
                else if (disablePearlsOfWisdom)
                {
                    Console.WriteLine("Rejecting pearl of wisdom " + keysToPlay[0] +
                        " because pearls have been disabled for the last phase of the race");
                    soundsProcessed.Add(keysToPlay[0]);
                }
            }
            else
            {
                oneOrMoreEventsEnabled = true;
            }
        }
    }
    Boolean wasInterrupted = false;
    if (oneOrMoreEventsEnabled)
    {
        // block for immediate messages...
        if (isImmediateMessages)
        {
            lock (queueToPlay)
            {
                openRadioChannelInternal();
                soundsProcessed.AddRange(playSounds(keysToPlay, isImmediateMessages, out wasInterrupted));
            }
        }
        else
        {
            // for queued messages, allow other messages to be inserted into the queue while these are being read
            openRadioChannelInternal();
            soundsProcessed.AddRange(playSounds(keysToPlay, isImmediateMessages, out wasInterrupted));
        }
    }
    else
    {
        soundsProcessed.AddRange(keysToPlay);
    }
    if (soundsProcessed.Count > 0)
    {
        // Purge played and rejected messages from the queue.
        lock (queueToPlay)
        {
            foreach (String key in soundsProcessed)
            {
                if (queueToPlay.Contains(key))
                {
                    queueToPlay.Remove(key);
                }
            }
        }
    }
    // Keep draining while more non-immediate messages are due.
    if (queueHasDueMessages(queueToPlay, isImmediateMessages) && !wasInterrupted && !isImmediateMessages)
    {
        Console.WriteLine("There are " + queueToPlay.Count + " more events in the queue, playing them...");
        playQueueContents(queueToPlay, isImmediateMessages);
    }
}
/// <summary>
/// Update any settings from a collection.
/// </summary>
/// <param name="commands">
/// The commands. Keys observed here: "wl" (window lines), "wc" (window columns),
/// "bl" (buffer lines), "bc" (buffer columns); values are the requested sizes.
/// Entries may be removed from the collection while conflicts are resolved.
/// </param>
public override void UpdateAnyCurrentSettings(IDictionary<string, int> commands)
{
    int bufferValue;
    int windowValue;

    // #1 Rule quickfix: post buf lin < post win lin, so remove buf lin setting gives win lin priority
    if (commands.TryGetValue("bl", out bufferValue) && commands.TryGetValue("wl", out windowValue) && bufferValue < windowValue)
    {
        commands.Remove("bl");
    }

    // #2 Rule quickfix: post buf col < post win col, so remove buf col setting gives win col priority
    if (commands.TryGetValue("bc", out bufferValue) && commands.TryGetValue("wc", out windowValue) && bufferValue < windowValue)
    {
        commands.Remove("bc");
    }

    var orderedCommands = new OrderedDictionary();
    if ((commands.ContainsKey("bl") && commands.ContainsKey("wl")) || (commands.ContainsKey("bc") && commands.ContainsKey("wc")))
    {
        // #3 Rule priority: must order win lin before buf lin, e.g. post buf lin < pre win lin
        // #4 Rule priority: must order win col before buf col, e.g. post buf col < pre win col
        // A simple array expresses the priority order directly (the original used a
        // Dictionary<int, string> indexed 0..3, which is just an array in disguise).
        string[] commandPriority = { "wl", "wc", "bl", "bc" };
        foreach (string cmd in commandPriority)
        {
            int value;
            if (commands.TryGetValue(cmd, out value))
            {
                // Move the command into the ordered set, preserving priority order.
                orderedCommands.Add(cmd, value);
                commands.Remove(cmd);
            }
        }
    }

    if (orderedCommands.Contains("bl") || commands.ContainsKey("bl"))
    {
        // Rule workaround: Console clear before "Console.BufferHeight = value" sets window position = 0.
        this.ConsoleClear();
    }

    // Apply the conflict-ordered settings first, in OrderedDictionary insertion order.
    if (orderedCommands.Count > 0)
    {
        var keys = new string[orderedCommands.Keys.Count];
        orderedCommands.Keys.CopyTo(keys, 0);
        foreach (var key in keys)
        {
            // Values were stored as boxed ints above; unbox directly instead of
            // round-tripping through ToString()/int.Parse().
            this.UpdateCurrentSettingFor(key, (int)orderedCommands[key]);
        }
    }

    // Then apply any remaining, order-independent settings.
    foreach (var command in commands)
    {
        this.UpdateCurrentSettingFor(command.Key, command.Value);
    }
}
// Metadata-filtering pipeline stage: if the instance is a sited IComponent whose
// site offers ITypeDescriptorFilterService, let that service filter the members.
// pipelineType selects the member kind: 0 = attributes (FilterAttributes),
// 1 = properties (FilterProperties), 2 = events (FilterEvents).
// Filtered results are cached in 'cache' (when the filter asks for it) so later
// calls can return the typed collection directly.
private static ICollection PipelineFilter(int pipelineType, ICollection members, object instance, IDictionary cache)
{
    bool flag; // true when the filter service requests that results be cached
    IComponent component = instance as IComponent;
    ITypeDescriptorFilterService filterService = null;
    // The filter service is only reachable through the component's site.
    if (component != null)
    {
        ISite site = component.Site;
        if (site != null)
        {
            filterService = site.GetService(typeof(ITypeDescriptorFilterService)) as ITypeDescriptorFilterService;
        }
    }
    IList list = members as ArrayList;
    // No filter service: nothing to do, pass the members through unchanged.
    if (filterService == null)
    {
        return members;
    }
    // The cache is only trustworthy when the incoming list is read-only; a writable
    // list means an earlier pipeline stage already changed it, so recompute.
    if ((cache != null) && ((list == null) || list.IsReadOnly))
    {
        FilterCacheItem item = cache[_pipelineFilterKeys[pipelineType]] as FilterCacheItem;
        if ((item != null) && item.IsValid(filterService))
        {
            return item.FilteredMembers;
        }
    }
    // Build a key->member table for the filter service to mutate. (Despite its name,
    // 'attributes' holds whichever member kind pipelineType selects.)
    OrderedDictionary attributes = new OrderedDictionary(members.Count);
    switch (pipelineType)
    {
        case 0:
            // Attributes are keyed by TypeId, so a later attribute with the same
            // TypeId replaces an earlier one.
            foreach (Attribute attribute in members)
            {
                attributes[attribute.TypeId] = attribute;
            }
            flag = filterService.FilterAttributes(component, attributes);
            break;
        case 1:
        case 2:
            foreach (MemberDescriptor descriptor in members)
            {
                string name = descriptor.Name;
                if (attributes.Contains(name))
                {
                    // Duplicate member name (extender providers can supply arbitrary
                    // names): re-key the new descriptor with a collision suffix...
                    string extenderCollisionSuffix = GetExtenderCollisionSuffix(descriptor);
                    if (extenderCollisionSuffix != null)
                    {
                        attributes[name + extenderCollisionSuffix] = descriptor;
                    }
                    // ...and, if the member already in the table is also an extender,
                    // re-key it under its suffixed name too.
                    MemberDescriptor member = (MemberDescriptor) attributes[name];
                    extenderCollisionSuffix = GetExtenderCollisionSuffix(member);
                    if (extenderCollisionSuffix != null)
                    {
                        attributes.Remove(name);
                        attributes[member.Name + extenderCollisionSuffix] = member;
                    }
                }
                else
                {
                    attributes[name] = descriptor;
                }
            }
            if (pipelineType == 1)
            {
                flag = filterService.FilterProperties(component, attributes);
            }
            else
            {
                flag = filterService.FilterEvents(component, attributes);
            }
            break;
        default:
            flag = false;
            break;
    }
    // Reuse the caller's list slots when it is writable; otherwise build a new one.
    if ((list == null) || list.IsReadOnly)
    {
        list = new ArrayList(attributes.Values);
    }
    else
    {
        list.Clear();
        foreach (object obj2 in attributes.Values)
        {
            list.Add(obj2);
        }
    }
    // The filter asked for caching: store a correctly typed, read-only collection so
    // later invocations can return it directly.
    if (flag && (cache != null))
    {
        ICollection is2;
        switch (pipelineType)
        {
            case 0:
            {
                Attribute[] array = new Attribute[list.Count];
                try
                {
                    list.CopyTo(array, 0);
                }
                catch (InvalidCastException)
                {
                    throw new ArgumentException(SR.GetString("TypeDescriptorExpectedElementType", new object[] { typeof(Attribute).FullName }));
                }
                is2 = new AttributeCollection(array);
                break;
            }
            case 1:
            {
                PropertyDescriptor[] descriptorArray = new PropertyDescriptor[list.Count];
                try
                {
                    list.CopyTo(descriptorArray, 0);
                }
                catch (InvalidCastException)
                {
                    throw new ArgumentException(SR.GetString("TypeDescriptorExpectedElementType", new object[] { typeof(PropertyDescriptor).FullName }));
                }
                is2 = new PropertyDescriptorCollection(descriptorArray, true);
                break;
            }
            case 2:
            {
                EventDescriptor[] descriptorArray2 = new EventDescriptor[list.Count];
                try
                {
                    list.CopyTo(descriptorArray2, 0);
                }
                catch (InvalidCastException)
                {
                    throw new ArgumentException(SR.GetString("TypeDescriptorExpectedElementType", new object[] { typeof(EventDescriptor).FullName }));
                }
                is2 = new EventDescriptorCollection(descriptorArray2, true);
                break;
            }
            default:
                is2 = null;
                break;
        }
        FilterCacheItem item2 = new FilterCacheItem(filterService, is2);
        cache[_pipelineFilterKeys[pipelineType]] = item2;
        // Invalidate the related attribute-filter stage cache, which is now stale.
        cache.Remove(_pipelineAttributeFilterKeys[pipelineType]);
    }
    // Always return the IList so later pipeline stages can modify it.
    return list;
}
/// <devdoc>
///     Retrieves custom attributes for _type, merging base classes and
///     interfaces, caching the result in _attributes.
/// </devdoc>
internal AttributeCollection GetAttributes()
{
    // Worst case collision scenario: we don't want the perf hit
    // of taking a lock, so if we collide we will query for
    // attributes twice. Not a big deal.
    //
    if (_attributes == null)
    {
        TypeDescriptor.Trace("Attributes : Building collection for {0}", _type.Name);

        // Obtaining attributes follows a very critical order: we must take care that
        // we merge attributes the right way. Consider this:
        //
        // [A4]
        // interface IBase;
        //
        // [A3]
        // interface IDerived;
        //
        // [A2]
        // class Base : IBase;
        //
        // [A1]
        // class Derived : Base, IDerived
        //
        // Calling GetAttributes on type Derived must merge attributes in the following
        // order: A1 - A4. Interfaces always lose to types, and interfaces and types
        // must be merged in the same order. At the same time, we must be careful
        // that we don't always go through reflection here, because someone could have
        // created a custom provider for a type. Because there is only one instance
        // of ReflectTypeDescriptionProvider created for typeof(object), if our code
        // is invoked here we can be sure that there is no custom provider for
        // _type all the way up the base class chain.
        // We cannot be sure that there is no custom provider for
        // interfaces that _type implements, however, because they are not derived
        // from _type. So, for interfaces, we must go through TypeDescriptor
        // again to get the interfaces attributes.

        // Get the type's attributes. This does not recurse up the base class chain.
        // We append base class attributes to this array so when walking we will
        // walk from Length - 1 to zero.
        //
        Attribute[] attrArray = ReflectTypeDescriptionProvider.ReflectGetAttributes(_type);
        Type baseType = _type.BaseType;

        while (baseType != null && baseType != typeof(object))
        {
            Attribute[] baseArray = ReflectTypeDescriptionProvider.ReflectGetAttributes(baseType);
            Attribute[] temp = new Attribute[attrArray.Length + baseArray.Length];
            Array.Copy(attrArray, 0, temp, 0, attrArray.Length);
            Array.Copy(baseArray, 0, temp, attrArray.Length, baseArray.Length);
            attrArray = temp;
            baseType = baseType.BaseType;
        }

        // Next, walk the type's interfaces. We append these to
        // the attribute array as well.
        //
        // Everything at or after this index came from an interface, not the class chain.
        int ifaceStartIdx = attrArray.Length;
        Type[] interfaces = _type.GetInterfaces();
        TypeDescriptor.Trace("Attributes : Walking {0} interfaces", interfaces.Length);
        for(int idx = 0; idx < interfaces.Length; idx++)
        {
            Type iface = interfaces[idx];

            // only do this for public interfaces.
            //
            if ((iface.Attributes & (TypeAttributes.Public | TypeAttributes.NestedPublic)) != 0)
            {
                // No need to pass an instance into GetTypeDescriptor here because, if someone provided a custom
                // provider based on object, it already would have hit.
                AttributeCollection ifaceAttrs = TypeDescriptor.GetAttributes(iface);
                if (ifaceAttrs.Count > 0)
                {
                    Attribute[] temp = new Attribute[attrArray.Length + ifaceAttrs.Count];
                    Array.Copy(attrArray, 0, temp, 0, attrArray.Length);
                    ifaceAttrs.CopyTo(temp, attrArray.Length);
                    attrArray = temp;
                }
            }
        }

        // Finally, put all these attributes in a dictionary and filter out the duplicates.
        //
        OrderedDictionary attrDictionary = new OrderedDictionary(attrArray.Length);

        for (int idx = 0; idx < attrArray.Length; idx++)
        {
            bool addAttr = true;
            // Interface-sourced attributes whose type is on the skip list are dropped.
            if (idx >= ifaceStartIdx)
            {
                for (int ifaceSkipIdx = 0; ifaceSkipIdx < _skipInterfaceAttributeList.Length; ifaceSkipIdx++)
                {
                    if (_skipInterfaceAttributeList[ifaceSkipIdx].IsInstanceOfType(attrArray[idx]))
                    {
                        addAttr = false;
                        break;
                    }
                }
            }

            // First occurrence of a TypeId wins: the array is ordered most-derived
            // first, so derived-class attributes shadow base/interface ones.
            if (addAttr && !attrDictionary.Contains(attrArray[idx].TypeId))
            {
                attrDictionary[attrArray[idx].TypeId] = attrArray[idx];
            }
        }

        attrArray = new Attribute[attrDictionary.Count];
        attrDictionary.Values.CopyTo(attrArray, 0);
        _attributes = new AttributeCollection(attrArray);
    }

    return _attributes;
}
/// <summary>
/// Loads the physical units (UFs) and documents associated with a level by running
/// the sp_getUFsEDocsAssociados stored procedure and populating UfsDocsAssoc,
/// UfsSeriesAssoc, UfsAssoc and DocsAssoc from its three result sets.
/// </summary>
/// <param name="nivelId">Id of the level whose associations are loaded.</param>
/// <param name="userID">Trustee id passed to the procedure (presumably for permission filtering — confirm against the procedure).</param>
/// <param name="conn">An open connection; must be a SqlConnection.</param>
public override void GetUfsEDocsAssociados(long nivelId, long userID, IDbConnection conn)
{
    long ticks = DateTime.Now.Ticks;
    UfsDocsAssoc = new Dictionary<long, List<long>>();
    UfsSeriesAssoc = new Dictionary<long, List<long>>();
    UfsAssoc = new OrderedDictionary();
    DocsAssoc = new OrderedDictionary();
    using (SqlCommand command = new SqlCommand(string.Empty, (SqlConnection)conn))
    {
        // Temp tables used by the stored procedure; created on this connection so
        // they are visible to the procedure call below.
        command.CommandText = "CREATE TABLE #TempRelacaoHierarquica (ID BIGINT, IDUpper BIGINT, IDTipoNivelRelacionado BIGINT) " +
                              "CREATE TABLE #UFRelated (IDUF BIGINT, IDNDoc BIGINT, IsNivelDoc TINYINT) " +
                              "CREATE INDEX iduf_ix ON #UFRelated (IDUF)";
        command.ExecuteNonQuery();
        command.CommandText = "sp_getUFsEDocsAssociados";
        command.CommandType = CommandType.StoredProcedure;
        command.Parameters.Add("@nivelID", SqlDbType.BigInt);
        command.Parameters.Add("@TrusteeID", SqlDbType.BigInt);
        command.Parameters[0].Value = nivelId;
        command.Parameters[1].Value = userID;
        // 'using' guarantees the reader is closed even if parsing a row throws;
        // the original only called reader.Close() on the success path, leaking the
        // reader (and blocking the connection) on exceptions.
        using (SqlDataReader reader = command.ExecuteReader())
        {
            long item;
            long nivelID;
            // Result set 1: UF id -> related level ids, split between series
            // relations (flag column == 1) and document relations.
            while (reader.Read())
            {
                item = System.Convert.ToInt64(reader.GetValue(0));
                nivelID = System.Convert.ToInt64(reader.GetValue(1));
                if (System.Convert.ToInt64(reader.GetValue(2)) == 1)
                {
                    if (UfsSeriesAssoc.ContainsKey(item))
                        UfsSeriesAssoc[item].Add(nivelID);
                    else
                        UfsSeriesAssoc.Add(item, new List<long>() { nivelID });
                }
                else
                {
                    if (UfsDocsAssoc.ContainsKey(item))
                        UfsDocsAssoc[item].Add(nivelID);
                    else
                        UfsDocsAssoc.Add(item, new List<long>() { nivelID });
                }
            }
            reader.NextResult();
            // Result set 2: one row per associated physical unit; first row per id wins.
            while (reader.Read())
            {
                item = System.Convert.ToInt64(reader.GetValue(0));
                if (!UfsAssoc.Contains(item))
                {
                    UnidadeFisicaAssociada uf = new UnidadeFisicaAssociada();
                    uf.IDNivel = item;
                    uf.Codigo = reader.GetValue(1).ToString();
                    uf.Designacao = reader.GetValue(2).ToString();
                    uf.FimAno = reader.GetValue(3).ToString();
                    uf.FimMes = reader.GetValue(4).ToString();
                    uf.FimDia = reader.GetValue(5).ToString();
                    uf.InicioAno = reader.GetValue(6).ToString();
                    uf.InicioMes = reader.GetValue(7).ToString();
                    uf.InicioDia = reader.GetValue(8).ToString();
                    uf.IsNotDocRelated = System.Convert.ToBoolean(reader.GetValue(9));
                    uf.IsSerieRelated = System.Convert.ToBoolean(reader.GetValue(10));
                    UfsAssoc.Add(item, uf);
                }
            }
            reader.NextResult();
            // Result set 3: one row per associated document; first row per id wins.
            while (reader.Read())
            {
                item = System.Convert.ToInt64(reader.GetValue(0));
                if (!DocsAssoc.Contains(item))
                {
                    DocumentoAssociado doc = new DocumentoAssociado();
                    doc.IDNivel = item;
                    doc.IDNivelUpper = System.Convert.ToInt64(reader.GetValue(1));
                    doc.IDTipoNivelRelacionado = System.Convert.ToInt32(reader.GetValue(2));
                    doc.IDFRD = System.Convert.ToInt64(reader.GetValue(3));
                    doc.FimAno = reader.GetValue(4).ToString();
                    doc.FimMes = reader.GetValue(5).ToString();
                    doc.FimDia = reader.GetValue(6).ToString();
                    doc.InicioAno = reader.GetValue(7).ToString();
                    doc.InicioMes = reader.GetValue(8).ToString();
                    doc.InicioDia = reader.GetValue(9).ToString();
                    doc.Codigo = reader.GetValue(10).ToString();
                    doc.DesignacaoUpper = reader.GetValue(11).ToString();
                    doc.Designacao = reader.GetValue(12).ToString();
                    // Preservar is tri-state: "" (NULL), "1" (true) or "0" (false).
                    if (reader.GetValue(13) == DBNull.Value)
                        doc.Preservar = string.Empty;
                    else if (System.Convert.ToBoolean(reader.GetValue(13)))
                        doc.Preservar = "1";
                    else
                        doc.Preservar = "0";
                    doc.IDAutoEliminacao = reader.GetValue(14).ToString();
                    doc.Expirado = reader.GetValue(15).ToString();
                    // NULL means no write permission.
                    doc.PermEscrever = !reader.IsDBNull(16) && System.Convert.ToBoolean(reader.GetValue(16));
                    DocsAssoc.Add(item, doc);
                }
            }
        }
    }
    Trace.WriteLine("<<sp_getUFsEDocsAssociados>>: " + new TimeSpan(DateTime.Now.Ticks - ticks).ToString());
}
/// <devdoc>
///     Metadata filtering is the third stage of our pipeline.
///     In this stage we check to see if the given object is a
///     sited component that provides the ITypeDescriptorFilterService
///     object. If it does, we allow the TDS to filter the metadata.
///     This will use the cache, if available, to store filtered
///     metadata.
/// </devdoc>
private static ICollection PipelineFilter(int pipelineType, ICollection members, object instance, IDictionary cache) {
    IComponent component = instance as IComponent;
    ITypeDescriptorFilterService componentFilter = null;

    // The filter service is only reachable through the component's site.
    if (component != null) {
        ISite site = component.Site;
        if (site != null) {
            componentFilter = site.GetService(typeof(ITypeDescriptorFilterService)) as ITypeDescriptorFilterService;
        }
    }

    // If we have no filter, there is nothing for us to do.
    //
    IList list = members as ArrayList;

    if (componentFilter == null) {
        Debug.Assert(cache == null || list == null || !cache.Contains(_pipelineFilterKeys[pipelineType]), "Earlier pipeline stage should have removed our cache");
        return members;
    }

    // Now, check our cache. The cache state is only valid
    // if the data coming into us is read-only. If it is read-write,
    // that means something higher in the pipeline has already changed
    // it so we must recompute anyway.
    //
    if (cache != null && (list == null || list.IsReadOnly)) {
        FilterCacheItem cacheItem = cache[_pipelineFilterKeys[pipelineType]] as FilterCacheItem;
        if (cacheItem != null && cacheItem.IsValid(componentFilter)) {
            return cacheItem.FilteredMembers;
        }
    }

    // Cache either is dirty or doesn't exist. Re-filter the members.
    // We need to build an IDictionary of key->value pairs and invoke
    // Filter* on the filter service.
    //
    OrderedDictionary filterTable = new OrderedDictionary(members.Count);
    bool cacheResults;

    switch(pipelineType) {
        case PIPELINE_ATTRIBUTES:
            // Attributes are keyed by TypeId, so a later attribute with the same
            // TypeId replaces an earlier one.
            foreach(Attribute attr in members) {
                filterTable[attr.TypeId] = attr;
            }
            cacheResults = componentFilter.FilterAttributes(component, filterTable);
            break;

        case PIPELINE_PROPERTIES:
        case PIPELINE_EVENTS:
            foreach(MemberDescriptor desc in members) {
                string descName = desc.Name;

                // We must handle the case of duplicate property names
                // because extender providers can provide any arbitrary
                // name. Our rule for this is simple: If we find a
                // duplicate name, resolve it back to the extender
                // provider that offered it and append "_" + the
                // provider name. If the provider has no name,
                // then append the object hash code.
                //
                if (filterTable.Contains(descName)) {
                    // First, handle the new property. Because
                    // of the order in which we added extended
                    // properties earlier in the pipeline, we can be
                    // sure that the new property is an extender. We
                    // cannot be sure that the existing property
                    // in the table is an extender, so we will
                    // have to check.
                    //
                    string suffix = GetExtenderCollisionSuffix(desc);
                    Debug.Assert(suffix != null, "Name collision with non-extender property.");
                    if (suffix != null) {
                        filterTable[descName + suffix] = desc;
                    }

                    // Now, handle the original property.
                    //
                    MemberDescriptor origDesc = (MemberDescriptor)filterTable[descName];
                    suffix = GetExtenderCollisionSuffix(origDesc);
                    if (suffix != null) {
                        // The original was an extender too: re-key it under its suffixed name.
                        filterTable.Remove(descName);
                        filterTable[origDesc.Name + suffix] = origDesc;
                    }
                }
                else {
                    filterTable[descName] = desc;
                }
            }

            if (pipelineType == PIPELINE_PROPERTIES) {
                cacheResults = componentFilter.FilterProperties(component, filterTable);
            }
            else {
                cacheResults = componentFilter.FilterEvents(component, filterTable);
            }
            break;

        default:
            Debug.Fail("unknown pipeline type");
            cacheResults = false;
            break;
    }

    // See if we can re-use the IList we were passed. If we can,
    // it is more efficient to re-use its slots than to generate new ones.
    //
    if (list == null || list.IsReadOnly) {
        Trace("Pipeline : Filter needs to create member list for {0}", instance.GetType().Name);
        list = new ArrayList(filterTable.Values);
    }
    else {
        list.Clear();
        foreach(object obj in filterTable.Values) {
            list.Add(obj);
        }
    }

    // Component filter has requested that we cache these
    // new changes. We store them as a correctly typed collection
    // so on successive invocations we can simply return. Note that
    // we always return the IList so that successive stages in the
    // pipeline can modify it.
    //
    if (cacheResults && cache != null) {
        ICollection cacheValue;

        switch(pipelineType) {
            case PIPELINE_ATTRIBUTES:
                Attribute[] attrArray = new Attribute[list.Count];
                try {
                    list.CopyTo(attrArray, 0);
                }
                catch(InvalidCastException) {
                    throw new ArgumentException(SR.GetString(SR.TypeDescriptorExpectedElementType, typeof(Attribute).FullName));
                }
                cacheValue = new AttributeCollection(attrArray);
                break;

            case PIPELINE_PROPERTIES:
                PropertyDescriptor[] propArray = new PropertyDescriptor[list.Count];
                try {
                    list.CopyTo(propArray, 0);
                }
                catch(InvalidCastException) {
                    throw new ArgumentException(SR.GetString(SR.TypeDescriptorExpectedElementType, typeof(PropertyDescriptor).FullName));
                }
                cacheValue = new PropertyDescriptorCollection(propArray, true);
                break;

            case PIPELINE_EVENTS:
                EventDescriptor[] eventArray = new EventDescriptor[list.Count];
                try {
                    list.CopyTo(eventArray, 0);
                }
                catch(InvalidCastException) {
                    throw new ArgumentException(SR.GetString(SR.TypeDescriptorExpectedElementType, typeof(EventDescriptor).FullName));
                }
                cacheValue = new EventDescriptorCollection(eventArray, true);
                break;

            default:
                Debug.Fail("unknown pipeline type");
                cacheValue = null;
                break;
        }

        Trace("Pipeline : Filter results being cached for {0}", instance.GetType().Name);
        FilterCacheItem cacheItem = new FilterCacheItem(componentFilter, cacheValue);
        cache[_pipelineFilterKeys[pipelineType]] = cacheItem;
        // The attribute-filter stage cache is now stale; drop it.
        cache.Remove(_pipelineAttributeFilterKeys[pipelineType]);
    }

    return list;
}
/// <summary>
/// Records the gender for a lemma (marking it "мр-жр" when the same lemma occurs
/// with both genders) and writes each proper-name form of the lemma, with its
/// gender/number/case tags, to the full output file.
/// </summary>
/// <param name="lemma">Upper-case lemma text.</param>
/// <param name="gender">Gender tag for this occurrence of the lemma.</param>
/// <param name="lemmaToGender">Accumulated lemma -> gender map; updated in place.</param>
/// <param name="lemmaNode">XML node of the lemma in the opencorpora dictionary.</param>
/// <param name="additionalXpathFilter">Extra XPath filter passed to GetProperFnodesXpathQuery to select form nodes.</param>
/// <param name="writerFull">Writer receiving "lemma&lt;tab&gt;form&lt;tab&gt;tags" lines.</param>
private static void ProcessWriteLemma(string lemma, string gender, OrderedDictionary lemmaToGender, XmlNode lemmaNode, string additionalXpathFilter, StreamWriter writerFull)
{
    if (lemmaToGender.Contains(lemma))
    {
        // Duplicate lemma. Same-gender duplicates do occur in the source data
        // (e.g. "АБДАЛОВИЧ") and are tolerated; a different gender means the name
        // is used for both genders, so mark it "мр-жр".
        if ((string)lemmaToGender[lemma] != gender)
        {
            lemmaToGender[lemma] = "мр-жр";
        }
    }
    else
    {
        lemmaToGender[lemma] = gender;
    }
    foreach (XmlNode childNode in lemmaNode.SelectNodes(GetProperFnodesXpathQuery(additionalXpathFilter)))
    {
        string form = childNode.Attributes["t"].Value.ToUpper();
        // Fix known typos in dict.opencorpora (see
        // https://github.com/petrovich/petrovich-eval/pull/2/commits/4c33042ae0aa46bfbe4e5e46a289e3137aaa9549)
        switch (form)
        {
            case "ЖУРАВЛВЁОЙ":
                form = "ЖУРАВЛЁВОЙ";
                break;
            case "КОВАЁВУ":
                form = "КОВАЛЁВУ";
                break;
        }
        string @case = ConvertCase(childNode, lemma);
        if (@case == null)
        {
            continue; // unsupported weird grammatical case
        }
        // Sanity check: a form node appears to be expected to have exactly two
        // child grammemes, or three when one of them is the "V-ie" variant marker
        // — TODO confirm against the opencorpora schema.
        bool expectedShape = childNode.ChildNodes.Count == 2 ||
            (childNode.ChildNodes.Count == 3 &&
             childNode.ChildNodes.OfType<XmlNode>().Any(node => node.Attributes["v"].Value == "V-ie"));
        if (!expectedShape)
        {
            throw new ApplicationException($"some strange info for lemma {lemma} in form {form}");
        }
        writerFull.WriteLine($"{lemma}\t{form}\t{gender},ед,{@case}");
    }
}