public void Regress_Mutation_ConstructThroughDictionaryIsShallowCopy()
{
    // Copying a CopyOnWriteHashtable through the IDictionary constructor
    // must defer the clone: the copy should start out shallow.
    CopyOnWriteHashtable original = new CopyOnWriteHashtable(StringComparer.OrdinalIgnoreCase);
    CopyOnWriteHashtable copy = new CopyOnWriteHashtable((IDictionary)original, StringComparer.OrdinalIgnoreCase);

    Assertion.Assert(copy.IsShallowCopy);
}
/// <summary>
/// Construct over an IDictionary instance.
/// If the dictionary is itself a CopyOnWriteHashtable with a matching comparer,
/// the copy is deferred (shallow); otherwise the contents are copied eagerly.
/// </summary>
/// <param name="dictionary">The dictionary to copy; must not be null.</param>
/// <param name="stringComparer">The string comparer to use.</param>
internal CopyOnWriteHashtable(IDictionary dictionary, StringComparer stringComparer)
{
    ErrorUtilities.VerifyThrowArgumentNull(dictionary, "dictionary");
    ErrorUtilities.VerifyThrowArgumentNull(stringComparer, "stringComparer");
    this.sharedLock = new object();

    CopyOnWriteHashtable source = dictionary as CopyOnWriteHashtable;
    if (source != null)
    {
        // NOTE(review): comparer "sameness" is judged by GetHashCode equality.
        // For the singleton StringComparer properties (Ordinal, OrdinalIgnoreCase, ...)
        // this distinguishes them, but hash equality is not general equality — TODO confirm
        // no caller passes distinct comparer instances that collide.
        if (source.stringComparer.GetHashCode() == stringComparer.GetHashCode())
        {
            // If we're copying another CopyOnWriteHashtable then we can defer the clone until later.
            ConstructFrom(source);
            return;
        }
        else
        {
            // Technically, it would be legal to fall through here and let a new hashtable be constructed.
            // However, Engine is supposed to use consistent case comparisons everywhere and so, for us,
            // this means a bug in the engine code somewhere.
            throw new InternalErrorException("Bug: Changing the case-sensitiveness of a copied hash-table.");
        }
    }

    // Can't defer this because we don't control what gets written to the dictionary exogenously.
    writeableData = new Hashtable(dictionary, stringComparer);
    readonlyData = null;
    this.stringComparer = stringComparer;
}
public void Regress450669_CaseSensitiveBatch_HashtableCopyRespectsComparer()
{
    // Seed a plain Hashtable that compares keys case-insensitively.
    Hashtable source = new Hashtable(StringComparer.OrdinalIgnoreCase);
    source["key"] = null;

    CopyOnWriteHashtable copy = new CopyOnWriteHashtable(source, StringComparer.OrdinalIgnoreCase);

    // Both the source and the copy should honor the case-insensitive comparer.
    Assertion.Assert(source.ContainsKey("kEy"));
    Assertion.Assert(copy.ContainsKey("kEy"));
}
public void Regress_SettingWhenValueNull()
{
    CopyOnWriteHashtable original = new CopyOnWriteHashtable(StringComparer.OrdinalIgnoreCase);
    CopyOnWriteHashtable copy = new CopyOnWriteHashtable(original, StringComparer.OrdinalIgnoreCase);
    Hashtable plain = new Hashtable();

    // Assigning a null value should still create the entry, matching plain Hashtable semantics.
    copy["key"] = null;
    plain["key"] = null;

    Assertion.AssertEquals(plain.ContainsKey("key"), copy.ContainsKey("key"));

    // The write must not leak back into the table we copied from.
    Assertion.Assert(!original.ContainsKey("key"));
}
public void Basic()
{
    CopyOnWriteHashtable source = new CopyOnWriteHashtable(StringComparer.OrdinalIgnoreCase);
    CopyOnWriteHashtable firstClone = (CopyOnWriteHashtable)source.Clone();
    CopyOnWriteHashtable secondClone = (CopyOnWriteHashtable)firstClone.Clone();

    source["Foo"] = "Bar";

    // The table that was written to sees the new entry...
    Assertion.Assert(source.ContainsKey("Foo"));

    // ...but clones taken before the write must not observe it.
    Assertion.Assert(!secondClone.ContainsKey("Foo"));
    Assertion.Assert(!firstClone.ContainsKey("Foo"));
}
/// <summary>
/// Implementation of construction logic: make this instance a shallow copy of
/// another CopyOnWriteHashtable by sharing its read-only backing table.
/// </summary>
/// <param name="that">The table to copy from. If it currently holds writeable
/// data, that data is frozen into its read-only slot so later writes to either
/// table cannot bleed through to the other.</param>
private void ConstructFrom(CopyOnWriteHashtable that)
{
    lock (that.sharedLock)
    {
        this.writeableData = null;

        // If the source was writeable, we need to transform it into
        // read-only because we don't want subsequent writes to bleed through.
        if (that.writeableData != null)
        {
            that.readonlyData = that.writeableData;
            that.writeableData = null;
        }

        this.readonlyData = that.readonlyData;
        this.stringComparer = that.stringComparer;
    }
}
/// <summary>
/// Initializes the cache for storing custom attributes (meta-data).
/// Both the unevaluated and evaluated tables use case-insensitive key comparison.
/// </summary>
private void InitializeCustomMetadataCache()
{
    this.evaluatedCustomMetadata = new CopyOnWriteHashtable(StringComparer.OrdinalIgnoreCase);
    this.unevaluatedCustomMetadata = new CopyOnWriteHashtable(StringComparer.OrdinalIgnoreCase);
}
/// <summary>
/// This constructor creates a new virtual (non-persisted) item based
/// on a ITaskItem object that was emitted by a task.
/// </summary>
public BuildItem(string itemName, ITaskItem taskItem)
{
    ErrorUtilities.VerifyThrowArgumentNull(taskItem, "taskItem");

    string itemInclude = EscapingUtilities.Escape(taskItem.ItemSpec);

    BuildItemHelper
    (
        null /* this is a virtual item with no backing XML */,
        itemName,
        itemInclude,
        false,
        /* PERF NOTE: don't waste time creating a new custom metadata cache,
         * because we're going to clone the given task item's custom metadata */
        null /* no definition library */
    );

    IDictionary clonedMetadata = taskItem.CloneCustomMetadata();

    // Escape each metadata value in place. Snapshot the keys first because
    // the table is mutated while we walk it.
    string[] metadataNames = new string[clonedMetadata.Count];
    clonedMetadata.Keys.CopyTo(metadataNames, 0);

    foreach (string metadataName in metadataNames)
    {
        clonedMetadata[metadataName] = EscapingUtilities.Escape((string)clonedMetadata[metadataName]);
    }

    this.unevaluatedCustomMetadata = new CopyOnWriteHashtable(clonedMetadata, StringComparer.OrdinalIgnoreCase);
    this.evaluatedCustomMetadata = new CopyOnWriteHashtable(clonedMetadata, StringComparer.OrdinalIgnoreCase);
    this.isPartOfProjectManifest = false;
}
/// <summary>
/// Reconstructs this item's state from a binary stream. The read order here
/// must match the corresponding write order exactly. For each optional field,
/// a leading marker byte of 0 means null; otherwise the field's data follows.
/// </summary>
internal void CreateFromStream(BinaryReader reader)
{
    importedFromAnotherProject = reader.ReadBoolean();

    #region RecursivePortionOfFinalItemSpecDirectory
    if (reader.ReadByte() == 0)
    {
        recursivePortionOfFinalItemSpecDirectory = null;
    }
    else
    {
        recursivePortionOfFinalItemSpecDirectory = reader.ReadString();
    }
    #endregion

    #region FinalItemSpecEscaped
    if (reader.ReadByte() == 0)
    {
        finalItemSpecEscaped = null;
    }
    else
    {
        finalItemSpecEscaped = reader.ReadString();
    }
    #endregion

    #region Name
    if (reader.ReadByte() == 0)
    {
        name = null;
    }
    else
    {
        name = reader.ReadString();
    }
    #endregion

    #region VitrualIncludeAttribute
    if (reader.ReadByte() == 0)
    {
        include = null;
    }
    else
    {
        include = reader.ReadString();
    }
    #endregion

    #region EvaluatedItemSpecEscaped
    if (reader.ReadByte() == 0)
    {
        evaluatedItemSpecEscaped = null;
    }
    else
    {
        evaluatedItemSpecEscaped = reader.ReadString();
    }
    #endregion

    #region UnevaluatedCustomMetadata
    if (reader.ReadByte() == 0)
    {
        unevaluatedCustomMetadata = null;
    }
    else
    {
        // Count-prefixed table; each value is itself preceded by a null marker byte.
        int numberUnevaluatedItems = reader.ReadInt32();
        unevaluatedCustomMetadata = new CopyOnWriteHashtable(numberUnevaluatedItems, StringComparer.OrdinalIgnoreCase);
        for (int i = 0; i < numberUnevaluatedItems; i++)
        {
            string key = reader.ReadString();
            string value = null;
            if (reader.ReadByte() != 0)
            {
                value = reader.ReadString();
            }
            unevaluatedCustomMetadata.Add(key, value);
        }
    }
    #endregion

    #region EvaluatedCustomMetadata
    if (reader.ReadByte() == 0)
    {
        evaluatedCustomMetadata = null;
    }
    else
    {
        int numberevaluatedCustomMetadata = reader.ReadInt32();
        evaluatedCustomMetadata = new CopyOnWriteHashtable(numberevaluatedCustomMetadata, StringComparer.OrdinalIgnoreCase);
        for (int i = 0; i < numberevaluatedCustomMetadata; i++)
        {
            string key = reader.ReadString();
            string value = null;
            if (reader.ReadByte() != 0)
            {
                value = reader.ReadString();
            }
            evaluatedCustomMetadata.Add(key, value);
        }
    }
    #endregion

    #region ItemSpecModifiers
    if (reader.ReadByte() == 0)
    {
        itemSpecModifiers = null;
    }
    else
    {
        // Plain Hashtable here (not copy-on-write), presized from the stream.
        int numberItemSpecModifiers = reader.ReadInt32();
        itemSpecModifiers = new Hashtable(numberItemSpecModifiers);
        for (int i = 0; i < numberItemSpecModifiers; i++)
        {
            string key = reader.ReadString();
            string value = null;
            if (reader.ReadByte() != 0)
            {
                value = reader.ReadString();
            }
            itemSpecModifiers.Add(key, value);
        }
    }
    #endregion
}
/// <summary>
/// If the metadata tables were backed up, revert them to the originals and
/// throw out the backups.
/// </summary>
internal void RevertToPersistedMetadata()
{
    if (!IsBackedUp)
    {
        return;
    }

    // Restore the saved tables...
    unevaluatedCustomMetadata = unevaluatedCustomMetadataBackup;
    evaluatedCustomMetadata = evaluatedCustomMetadataBackup;

    // ...and discard the backups.
    unevaluatedCustomMetadataBackup = null;
    evaluatedCustomMetadataBackup = null;
}
/// <summary>
/// Takes a one-time snapshot of the metadata tables so they can be restored
/// later; does nothing if a backup already exists.
/// </summary>
private void BackupPersistedMetadata()
{
    if (IsBackedUp)
    {
        // A snapshot has already been taken; keep the original backup.
        return;
    }

    unevaluatedCustomMetadataBackup = (CopyOnWriteHashtable)unevaluatedCustomMetadata.Clone();
    evaluatedCustomMetadataBackup = (CopyOnWriteHashtable)evaluatedCustomMetadata.Clone();
}
/// <summary>
/// Reconstructs the property table from a binary stream produced by the
/// matching serialization code. A leading marker byte of 0 means the table
/// was null; otherwise an entry count follows, then each key/value pair.
/// </summary>
internal void CreateFromStream(BinaryReader reader)
{
    if (reader.ReadByte() == 0)
    {
        propertyTableByName = null;
    }
    else
    {
        // Read the number of hash items and presize the table accordingly.
        int numberOfHashKeyValuePairs = reader.ReadInt32();
        propertyTableByName = new CopyOnWriteHashtable(numberOfHashKeyValuePairs, StringComparer.OrdinalIgnoreCase);
        for (int i = 0; i < numberOfHashKeyValuePairs; i++)
        {
            string key = reader.ReadString();
            BuildProperty value = null;
            // Marker byte of 1 indicates a serialized BuildProperty follows; 0 means a null value.
            if (reader.ReadByte() == 1)
            {
                value = BuildProperty.CreateFromStream(reader);
            }
            propertyTableByName.Add(key, value);
        }
    }
}
public void Regress450669_CaseSensitiveBatch_WeDontAllowChangingCaseOnCopiedHashTable()
{
    CopyOnWriteHashtable original = new CopyOnWriteHashtable(StringComparer.OrdinalIgnoreCase);

    // Copying with a comparer of different case-sensitivity is a bug in the
    // caller; the constructor is expected to reject it.
    CopyOnWriteHashtable copy = new CopyOnWriteHashtable(original, StringComparer.Ordinal); // Different case.
}
/// <summary>
/// Constructor which creates a new &lt;PropertyGroup&gt; in the XML document
/// specified.
/// </summary>
/// <param name="parentProject"></param>
/// <param name="ownerDocument"></param>
/// <param name="importedFromAnotherProject"></param>
/// <owner>RGoel</owner>
internal BuildPropertyGroup
(
    Project parentProject,
    XmlDocument ownerDocument,
    bool importedFromAnotherProject
)
{
    error.VerifyThrow(ownerDocument != null, "Need valid XmlDocument owner for this property group.");

    this.parentProject = parentProject;
    this.ownerDocument = ownerDocument;
    this.importedFromAnotherProject = importedFromAnotherProject;

    // Create the new <PropertyGroup> XML element in the default MSBuild namespace.
    this.propertyGroupElement = ownerDocument.CreateElement(XMakeElements.propertyGroup, XMakeAttributes.defaultXmlNamespace);

    // No condition yet; properties are tracked in the ordered list, so there
    // is no name-indexed table for this persisted group.
    this.conditionAttribute = null;
    this.propertyTableByName = null;
    this.propertyList = new ArrayList();
}
/// <summary>
/// Constructor, from an existing &lt;PropertyGroup&gt; XML element. Validates the
/// element, records whether the group was imported, then walks the element's
/// attributes and child elements to build the BuildProperty list.
/// </summary>
internal BuildPropertyGroup(Project parentProject, XmlElement propertyGroupElement, PropertyType propertyType)
{
    error.VerifyThrow(propertyGroupElement != null, "Need valid <PropertyGroup> element.");

    // Make sure this really is the <PropertyGroup> node.
    ProjectXmlUtilities.VerifyThrowElementName(propertyGroupElement, XMakeElements.propertyGroup);

    this.parentProject = parentProject;
    this.propertyGroupElement = propertyGroupElement;
    this.importedFromAnotherProject = (propertyType == PropertyType.ImportedProperty);
    this.conditionAttribute = null;
    this.propertyTableByName = null;
    this.propertyList = new ArrayList();
    this.ownerDocument = propertyGroupElement.OwnerDocument;

    // This <PropertyGroup> is coming from an existing XML element, so
    // walk through all the attributes and child elements, creating the
    // necessary BuildProperty objects.

    // Loop through the list of attributes on the <PropertyGroup> element.
    foreach (XmlAttribute propertyGroupAttribute in this.propertyGroupElement.Attributes)
    {
        switch (propertyGroupAttribute.Name)
        {
            // Process the "condition" attribute.
            case XMakeAttributes.condition:
                this.conditionAttribute = propertyGroupAttribute;
                break;

            // only recognized by the new OM:
            // just ignore here
            case XMakeAttributes.label:
                // do nothing
                break;

            // Unrecognized attribute.
            default:
                ProjectXmlUtilities.ThrowProjectInvalidAttribute(propertyGroupAttribute);
                break;
        }
    }

    // Loop through the child nodes of the <PropertyGroup> element.
    foreach (XmlNode propertyGroupChildNode in this.propertyGroupElement)
    {
        switch (propertyGroupChildNode.NodeType)
        {
            // Handle XML comments under the <PropertyGroup> node (just ignore them).
            case XmlNodeType.Comment:
            // fall through
            case XmlNodeType.Whitespace:
                // ignore whitespace
                break;

            case XmlNodeType.Element:
                // The only type of child node that a <PropertyGroup> element can contain
                // is a property element.
                // Make sure the property doesn't have a custom namespace
                ProjectXmlUtilities.VerifyThrowProjectValidNamespace((XmlElement)propertyGroupChildNode);

                // Send the property element to another class for processing.
                BuildProperty newProperty = new BuildProperty((XmlElement)propertyGroupChildNode, propertyType);
                newProperty.ParentPersistedPropertyGroup = this;
                this.propertyList.Add(newProperty);
                break;

            default:
                // Unrecognized child element.
                ProjectXmlUtilities.ThrowProjectInvalidChildElement(propertyGroupChildNode);
                break;
        }
    }
}
/// <summary>
/// Constructor for empty virtual (non-persisted) BuildPropertyGroup. Use this constructor
/// when the initial number of properties can be estimated, to reduce re-sizing of the list.
/// </summary>
private BuildPropertyGroup(Project parentProject, int capacity)
{
    this.parentProject = parentProject;

    // Virtual groups have no backing XML, no condition, and were not imported.
    this.propertyGroupElement = null;
    this.importedFromAnotherProject = false;
    this.conditionAttribute = null;

    // Presize the name-indexed table; virtual groups keep no ordered list.
    this.propertyTableByName = new CopyOnWriteHashtable(capacity, StringComparer.OrdinalIgnoreCase);
    this.propertyList = null;
}
/// <summary>
/// Helper for the Clear methods. Empties the group's condition, hash table,
/// output-property backup, and property list; when the group is persisted and
/// clearing of imported groups is not requested, also removes the property
/// elements from the backing XML.
/// </summary>
/// <param name="clearImportedPropertyGroup">True to allow clearing an imported
/// group's in-memory state while leaving its XML untouched.</param>
internal void ClearHelper(bool clearImportedPropertyGroup)
{
    // If this group is backed by XML, clear all attributes and
    // children out unless it's an imported group, in which case we don't want to modify the XML
    if (IsPersisted && !clearImportedPropertyGroup)
    {
        MustBePersisted("NeedPersistedPropertyGroup", XMakeElements.propertyGroup);

        // We don't allow any modifications to the XML of any of the imported
        // project files ... only the main project file.
        // NOTE(review): clearImportedPropertyGroup is always false inside this
        // branch, so this reduces to checking importedFromAnotherProject.
        error.VerifyThrowInvalidOperation(!this.importedFromAnotherProject || clearImportedPropertyGroup,
            "CannotModifyImportedProjects");

        // Remove all of the property elements from wherever they may be.
        foreach (BuildProperty propertyToRemove in this.propertyList)
        {
            // Find the property XML element.
            XmlElement propertyElement = propertyToRemove.PropertyElement;
            MustBelongToPropertyGroup(propertyElement);

            // Remove the property element.
            propertyElement.ParentNode.RemoveChild(propertyElement);
            propertyToRemove.ParentPersistedPropertyGroup = null;
        }

        // NOTE(review): this re-check duplicates the call at the top of the
        // branch — presumably an invariant sanity check after mutation; confirm.
        MustBePersisted("NeedPersistedPropertyGroup", XMakeElements.propertyGroup);
    }

    this.conditionAttribute = null;

    // Clear the contents of the hash table, if one exists.
    if (this.propertyTableByName != null)
    {
        this.propertyTableByName.Clear();
    }

    // clear out saved properties
    propertiesOverriddenByOutputProperties = null;

    // Clear the contents of the arraylist, if one exists.
    if (this.propertyList != null)
    {
        this.propertyList.Clear();
    }

    this.MarkPropertyGroupAsDirty();
}
/// <summary>
/// Removes all output properties, and restores the non-output properties that were overridden.
/// Requires property group to be virtual.
/// </summary>
internal void RevertAllOutputProperties()
{
    MustBeVirtual("NeedVirtualPropertyGroup");

    if (propertiesOverriddenByOutputProperties != null)
    {
        foreach (DictionaryEntry overriddenEntry in propertiesOverriddenByOutputProperties)
        {
            // Drop the output property currently occupying this name...
            propertyTableByName.Remove(overriddenEntry.Key);

            // ...and put back the property it displaced, if there was one.
            if (overriddenEntry.Value != null)
            {
                propertyTableByName.Add(overriddenEntry.Key, overriddenEntry.Value);
            }
        }

        propertiesOverriddenByOutputProperties = null;
    }

    MarkPropertyGroupAsDirty();
}
public void Regress_Mutation_MustHaveNonNullStringComparer2()
{
    // Passing null for both the dictionary and the comparer must be rejected
    // by the constructor's argument validation.
    CopyOnWriteHashtable table = new CopyOnWriteHashtable(null, null);
}
public void Regress_Mutation_ClearReadOnlyData()
{
    CopyOnWriteHashtable original = new CopyOnWriteHashtable(StringComparer.OrdinalIgnoreCase);
    original["key"] = "value";

    CopyOnWriteHashtable copy = new CopyOnWriteHashtable(original, StringComparer.OrdinalIgnoreCase);
    Assertion.Assert(copy.ContainsKey("key"));
    Assertion.Assert(copy.IsShallowCopy);

    // Clearing the source must not disturb the shallow copy's view of the data.
    original.Clear();
    Assertion.Assert(copy.ContainsKey("key"));
    Assertion.Assert(!original.ContainsKey("key"));
}
/// <summary>
/// Construct a shallow copy over another instance of this class.
/// </summary>
/// <param name="that">The instance whose backing data this copy will share.</param>
private CopyOnWriteHashtable(CopyOnWriteHashtable that)
{
    // Each table gets its own lock object; only the backing data is shared.
    this.sharedLock = new object();
    ConstructFrom(that);
}
/// <summary>
/// Sets a property.
///
/// Either overrides the value of the property with the given name, or adds it if it
/// doesn't already exist. Setting to the same value as before does nothing.
///
/// This method will take into account property precedence rules, so that for
/// example, a reserved MSBuild property cannot be overridden by a normal property.
///
/// PropertyGroup must be virtual.
/// </summary>
/// <param name="newProperty">The property to set.</param>
internal void SetProperty
(
    BuildProperty newProperty
)
{
    // We don't support this method for PropertyGroups that are
    // represented by an actual <PropertyGroup> element. This is because
    // persisted PropertyGroups can contain multiple properties with the same
    // name, so the behavior of SetProperty becomes ambiguous.
    MustBeVirtual("NeedVirtualPropertyGroup");

    // If a property with this name already exists in our collection, then we have
    // to override it, taking into account the precedence rules for properties.
    BuildProperty existingProperty = (BuildProperty)propertyTableByName[newProperty.Name];
    bool isEquivalentToExistingProperty = false;
    if (existingProperty != null)
    {
        // If the existing property is an XMake reserved property, we may have an
        // invalid project file, because reserved properties are not allowed to
        // be set.
        // Don't fail if the new property is itself a "reserved" property. We
        // want to be able to override reserved properties with new reserved
        // properties, otherwise the engine itself would never be allowed to
        // change the value of a reserved property.
        ProjectErrorUtilities.VerifyThrowInvalidProject(
            (existingProperty.Type != PropertyType.ReservedProperty) || (newProperty.Type == PropertyType.ReservedProperty),
            newProperty.PropertyElement, "CannotModifyReservedProperty", newProperty.Name);

        // Also make sure it's not a read-only property (such as a property
        // that was set at the XMake command-line), but don't actually throw
        // an error in this case. Only output properties from tasks are allowed
        // to override read-only properties
        if ((existingProperty.Type == PropertyType.GlobalProperty) && (newProperty.Type != PropertyType.OutputProperty))
        {
            return;
        }

        isEquivalentToExistingProperty = newProperty.IsEquivalent(existingProperty);
        if (!isEquivalentToExistingProperty)
        {
            // Allow properties to be "set" to the same value during a build. This is because Visual Studio unfortunately does this often,
            // and it is safe to do this, because we won't actually change any state.
            ErrorUtilities.VerifyThrowInvalidOperation(parentProject == null || !parentProject.IsBuilding, "CannotSetPropertyDuringBuild");
        }
    }

    // Keep track of all output properties, so we can remove them later.
    if (newProperty.Type == PropertyType.OutputProperty)
    {
        if (propertiesOverriddenByOutputProperties == null)
        {
            propertiesOverriddenByOutputProperties = new CopyOnWriteHashtable(StringComparer.OrdinalIgnoreCase);
        }

        if (propertiesOverriddenByOutputProperties.Contains(newProperty.Name))
        {
            error.VerifyThrow(existingProperty != null,
                "If we've overridden this property before, it must exist in the main property table.");
            error.VerifyThrow(existingProperty.Type == PropertyType.OutputProperty,
                "If we've overriden this property before, it must be stored as an output property in the main property table.");
        }
        else
        {
            error.VerifyThrow((existingProperty == null) || (existingProperty.Type != PropertyType.OutputProperty),
                "If the property already exists in the main property table, it can't already be there as an output property, because then we would have stored an entry in propertiesOverriddenByOutputProperties.");

            // NOTE: Use Hashtable.Add() because each output property should only be added to this
            // table once. If we ever try to add the same output property to this table twice,
            // it's a bug in our code.
            // "existingProperty" may be null, and that's okay.
            propertiesOverriddenByOutputProperties.Add(newProperty.Name, existingProperty);
        }
    }

    // Okay, now actually set our property, but only if the value has actually changed.
    if (!isEquivalentToExistingProperty)
    {
        this.propertyTableByName[newProperty.Name] = newProperty;
        this.MarkPropertyGroupAsDirty();
    }
}
/// <summary>
/// Clones the hashtables which cache the values of all the custom metadata on this item.
/// Callers should do this when they know that they have a shallow clone of another item,
/// and they want to modify the attributes on this item without touching the original item.
/// </summary>
/// <owner>RGoel</owner>
internal void CloneVirtualMetadata()
{
    this.unevaluatedCustomMetadata = (CopyOnWriteHashtable)this.unevaluatedCustomMetadata.Clone();
    this.evaluatedCustomMetadata = (CopyOnWriteHashtable)this.evaluatedCustomMetadata.Clone();
}
/// <summary>
/// Returns a dictionary containing any default metadata, with the provided set of metadata
/// overlaid on those defaults.
/// </summary>
/// <param name="customMetadata">The item-specific metadata; returned unchanged when
/// there are no defaults to merge.</param>
private IDictionary MergeDefaultMetadata(IDictionary customMetadata)
{
    if (itemDefinitionLibrary == null)
    {
        return customMetadata;
    }

    IDictionary defaults = itemDefinitionLibrary.GetDefaultedMetadata(name);
    if (defaults == null)
    {
        // Nothing to overlay; hand back the caller's table untouched.
        return customMetadata;
    }

    // Start from the defaults, then let the specific metadata win on conflicts.
    IDictionary merged = new CopyOnWriteHashtable(defaults, StringComparer.OrdinalIgnoreCase);
    foreach (DictionaryEntry entry in customMetadata)
    {
        merged[entry.Key] = entry.Value;
    }

    return merged;
}
public void Regress_Mutation_WritingSameValueShouldNotCauseDeepCopy()
{
    CopyOnWriteHashtable original = new CopyOnWriteHashtable(StringComparer.OrdinalIgnoreCase);
    original["answer"] = "yes";

    CopyOnWriteHashtable copy = new CopyOnWriteHashtable(original, StringComparer.OrdinalIgnoreCase);
    Assertion.Assert(copy.IsShallowCopy);

    // Re-writing a value that is already present must not trigger the deep copy.
    copy["answer"] = "yes";
    Assertion.Assert(copy.IsShallowCopy); // Setting the same value should not cause a deep copy.
}