/// <summary>
/// Constructs a token bucket object that can be compared against other
/// buckets. This dummy bucket is a patently invalid bucket, and cannot
/// be used for any other operations besides comparison.
/// </summary>
/// <remarks>
/// PERF NOTE: A dummy bucket is intentionally very light-weight, and it
/// allocates a minimum of memory compared to a real bucket.
/// </remarks>
/// <owner>SumedhK</owner>
/// <param name="itemMetadata">Metadata values the dummy bucket carries for comparison purposes.</param>
/// <returns>An item bucket that is invalid for everything except comparisons.</returns>
internal static ItemBucket GetDummyBucketForComparisons(Dictionary<string, string> itemMetadata)
{
    // Only the expander (holding the metadata) is initialized; the lookup and
    // item lists are deliberately left unset to keep this allocation cheap.
    ItemBucket dummy = new ItemBucket();
    dummy.expander = new Expander((ReadOnlyLookup)null, itemMetadata);

    return dummy;
}
/// <summary>
/// Helper method so we can keep the real Expander.ExpandMetadataAndProperties private.
/// </summary>
/// <param name="bucket">Bucket whose expander supplies the property and metadata values.</param>
/// <param name="expression">The expression to expand.</param>
/// <returns>The expression with properties and metadata expanded.</returns>
/// <owner>RGoel</owner>
private static string ExpandMetadataAndProperties
(
    ItemBucket bucket,
    string expression
)
{
    // Restrict expansion to properties and metadata only (no item vectors).
    Expander restrictedExpander = new Expander(bucket.Expander, ExpanderOptions.ExpandPropertiesAndMetadata);

    // The throwaway attribute exists only to give the expander an XML node for
    // error-reporting context.
    XmlAttribute dummyAttribute = (new XmlDocument()).CreateAttribute("foo");

    return restrictedExpander.ExpandAllIntoString(expression, dummyAttribute);
}
/// <summary>
/// Add items to the world. This is the in-target equivalent of an item include expression outside of a target.
/// </summary>
/// <param name="child">The item element being executed.</param>
/// <param name="bucket">Bucket providing the expander and the lookup to add into.</param>
private void ExecuteAdd(BuildItemGroupChildXml child, ItemBucket bucket)
{
    // Mark the item "not persisted" so it is cleaned away when the project is reset.
    BuildItem item = new BuildItem(child.Element, false /* not imported */, false /* not persisted */, itemDefinitionLibrary);

    // Evaluate honors the item's condition: a false condition yields an empty group.
    BuildItemGroup evaluatedItems = item.Evaluate(bucket.Expander, executionDirectory, true /* expand metadata */, ParserOptions.AllowAll, loggingServices, buildEventContext);

    bucket.Lookup.AddNewItems(evaluatedItems);
}
/// <summary>
/// Helper method so we can keep the real Expander.ExpandItemsIntoString private.
/// </summary>
/// <param name="bucket">Bucket whose lookup supplies the items to expand.</param>
/// <param name="expression">The expression to expand.</param>
/// <returns>The expression with item vectors expanded.</returns>
/// <owner>RGoel</owner>
private static string ExpandItemsIntoString
(
    ItemBucket bucket,
    string expression
)
{
    // Restrict expansion to item vectors only, reading items through a
    // read-only view of the bucket's lookup.
    Expander itemsOnlyExpander = new Expander(new ReadOnlyLookup(bucket.Lookup), null, ExpanderOptions.ExpandItems);

    // The throwaway attribute exists only to give the expander an XML node for
    // error-reporting context.
    XmlAttribute dummyAttribute = (new XmlDocument()).CreateAttribute("foo");

    return itemsOnlyExpander.ExpandAllIntoString(expression, dummyAttribute);
}
/// <summary>
/// Modifies items in the world - specifically, changes their metadata. Changes to items that are part of the project manifest are backed up, so
/// they can be reverted when the project is reset after the end of the build.
/// </summary>
/// <param name="child">The Modify element being executed; its children are the metadata to set.</param>
/// <param name="bucket">Bucket providing the expander and the lookup whose items are modified.</param>
private void ExecuteModify(BuildItemGroupChildXml child, ItemBucket bucket)
{
    // A false condition means this modification is a no-op for this bucket.
    if (!Utilities.EvaluateCondition(child.Condition, child.ConditionAttribute, bucket.Expander, ParserOptions.AllowAll, loggingServices, buildEventContext))
    {
        return;
    }

    BuildItemGroup group = (BuildItemGroup)bucket.Lookup.GetItems(child.Name);
    if (group == null)
    {
        // No items of this type to modify
        return;
    }

    // Figure out what metadata names and values we need to set
    Dictionary <string, string> metadataToSet = new Dictionary <string, string>(StringComparer.OrdinalIgnoreCase);

    List <XmlElement> metadataElements = child.GetChildren();
    foreach (XmlElement metadataElement in metadataElements)
    {
        // Each metadata child may carry its own condition; default to applying it.
        bool metadataCondition = true;
        XmlAttribute conditionAttribute = ProjectXmlUtilities.GetConditionAttribute(metadataElement, true /*no other attributes allowed*/);

        if (conditionAttribute != null)
        {
            metadataCondition = Utilities.EvaluateCondition(conditionAttribute.Value, conditionAttribute, bucket.Expander, ParserOptions.AllowAll, loggingServices, buildEventContext);
        }

        if (metadataCondition)
        {
            string unevaluatedMetadataValue = Utilities.GetXmlNodeInnerContents(metadataElement);

            // Expand properties/items/metadata but keep MSBuild escaping intact.
            string evaluatedMetadataValue = bucket.Expander.ExpandAllIntoStringLeaveEscaped(unevaluatedMetadataValue, metadataElement);

            // The last metadata with a particular name, wins, so we just set through the indexer here.
            metadataToSet[metadataElement.Name] = evaluatedMetadataValue;
        }
    }

    bucket.Lookup.ModifyItems(child.Name, group, metadataToSet);
}
public void InitiallyNoItemsInBucketOfTypesInItemNames()
{
    // This bucket batches over items of type "i"
    string[] itemNames = new string[] { "i" };

    // The project, however, contains items of both type "i" and type "j"
    BuildItemGroup groupOfI = new BuildItemGroup();
    BuildItemGroup groupOfJ = new BuildItemGroup();
    groupOfI.AddNewItem("i", "i1");
    groupOfJ.AddNewItem("j", "j1");

    Hashtable itemsByType = new Hashtable(StringComparer.OrdinalIgnoreCase);
    itemsByType.Add("i", groupOfI);
    itemsByType.Add("j", groupOfJ);

    Lookup lookup = LookupHelpers.CreateLookup(itemsByType);

    ItemBucket bucket = new ItemBucket(itemNames, new Dictionary<string, string>(), lookup, 0);

    // No items of type i
    // NOTE(review): "i1" is an item-spec, not an item type -- this assertion looks
    // like it was meant to read GetItems("i"); as written it queries a type that
    // never exists and so always sees zero items. Confirm against Lookup.GetItems.
    Assertion.AssertEquals(0, bucket.Lookup.GetItems("i1").Count);

    // Items of type j, however, are visible
    Assertion.AssertEquals(1, bucket.Lookup.GetItems("j").Count);
}
/// <summary>
/// Remove items from the world. Removals of items that are part of the project manifest are backed up, so
/// they can be reverted when the project is reset after the end of the build.
/// </summary>
/// <param name="child">The Remove element being executed.</param>
/// <param name="bucket">Bucket providing the expander and the lookup to remove from.</param>
private void ExecuteRemove(BuildItemGroupChildXml child, ItemBucket bucket)
{
    // A false condition means this Remove is a no-op for this bucket.
    bool conditionResult = Utilities.EvaluateCondition(child.Condition, child.ConditionAttribute, bucket.Expander, ParserOptions.AllowAll, loggingServices, buildEventContext);
    if (!conditionResult)
    {
        return;
    }

    BuildItemGroup existingItems = bucket.Lookup.GetItems(child.Name);
    if (existingItems == null)
    {
        // No items of this type to remove
        return;
    }

    // Match the Remove specification against the existing items of this type.
    List <BuildItem> matches = BuildItemGroup.FindItemsMatchingSpecification(existingItems, child.Remove, child.RemoveAttribute, bucket.Expander, executionDirectory);

    if (matches != null)
    {
        bucket.Lookup.RemoveItems(matches);
    }
}
/// <summary>
/// Recomputes the task's "ContinueOnError" setting.
/// </summary>
/// <param name="bucket">Bucket whose expander is used to expand the attribute value.</param>
/// <param name="engineProxy">Proxy that must be told the new value so the task sees it when querying IBuildEngine.</param>
private void UpdateContinueOnError(ItemBucket bucket, EngineProxy engineProxy)
{
    XmlAttribute continueOnErrorAttribute = taskNode.Attributes[XMakeAttributes.continueOnError];

    // Default to "false" when the attribute is absent; otherwise expand embedded
    // item vectors (after properties and item metadata) and convert to a boolean.
    bool effectiveValue = false;

    if (continueOnErrorAttribute != null)
    {
        try
        {
            effectiveValue = ConversionUtilities.ConvertStringToBool(bucket.Expander.ExpandAllIntoString(continueOnErrorAttribute));
        }
        // handle errors in string-->bool conversion
        catch (ArgumentException e)
        {
            ProjectErrorUtilities.VerifyThrowInvalidProject(false, continueOnErrorAttribute, "InvalidContinueOnErrorAttribute", TaskName, e.Message);
        }
    }

    continueOnError = effectiveValue;

    // We need to access an internal method of the EngineProxy in order to update the value
    // of continueOnError that will be returned to the task when the task queries IBuildEngine for it
    engineProxy.UpdateContinueOnError(continueOnError);
}
/// <summary>
/// Remove items from the world. Removals of items that are part of the project manifest are backed up, so
/// they can be reverted when the project is reset after the end of the build.
/// </summary>
/// <param name="child">The Remove element being executed.</param>
/// <param name="bucket">Bucket providing the expander and the lookup to remove from.</param>
private void ExecuteRemove(BuildItemGroupChildXml child, ItemBucket bucket)
{
    // A false condition means this Remove is a no-op for this bucket.
    if (!Utilities.EvaluateCondition(child.Condition, child.ConditionAttribute, bucket.Expander, ParserOptions.AllowAll, loggingServices, buildEventContext))
    {
        return;
    }

    BuildItemGroup group = bucket.Lookup.GetItems(child.Name);
    if (group == null)
    {
        // No items of this type to remove
        return;
    }

    // Find the subset of existing items matching the Remove specification.
    List<BuildItem> itemsToRemove = BuildItemGroup.FindItemsMatchingSpecification(group, child.Remove, child.RemoveAttribute, bucket.Expander, executionDirectory);

    if (itemsToRemove != null)
    {
        bucket.Lookup.RemoveItems(itemsToRemove);
    }
}
/// <summary>
/// Modifies items in the world - specifically, changes their metadata. Changes to items that are part of the project manifest are backed up, so
/// they can be reverted when the project is reset after the end of the build.
/// </summary>
/// <param name="child">The Modify element being executed; its children are the metadata to set.</param>
/// <param name="bucket">Bucket providing the expander and the lookup whose items are modified.</param>
private void ExecuteModify(BuildItemGroupChildXml child, ItemBucket bucket)
{
    // A false condition means this modification is a no-op for this bucket.
    if (!Utilities.EvaluateCondition(child.Condition, child.ConditionAttribute, bucket.Expander, ParserOptions.AllowAll, loggingServices, buildEventContext))
    {
        return;
    }

    BuildItemGroup itemsOfType = (BuildItemGroup)bucket.Lookup.GetItems(child.Name);
    if (itemsOfType == null)
    {
        // No items of this type to modify
        return;
    }

    // Gather the metadata name/value pairs to apply; the last value seen for a
    // given name wins because we assign through the dictionary indexer.
    Dictionary<string, string> metadataUpdates = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

    foreach (XmlElement metadataElement in child.GetChildren())
    {
        // Each metadata child may carry its own condition.
        XmlAttribute metadataConditionAttribute = ProjectXmlUtilities.GetConditionAttribute(metadataElement, true /*no other attributes allowed*/);

        bool applyThisMetadata = true;
        if (metadataConditionAttribute != null)
        {
            applyThisMetadata = Utilities.EvaluateCondition(metadataConditionAttribute.Value, metadataConditionAttribute, bucket.Expander, ParserOptions.AllowAll, loggingServices, buildEventContext);
        }

        if (applyThisMetadata)
        {
            string rawValue = Utilities.GetXmlNodeInnerContents(metadataElement);

            // Expand properties/items/metadata but keep MSBuild escaping intact.
            metadataUpdates[metadataElement.Name] = bucket.Expander.ExpandAllIntoStringLeaveEscaped(rawValue, metadataElement);
        }
    }

    bucket.Lookup.ModifyItems(child.Name, itemsOfType, metadataUpdates);
}
/// <summary>
/// Parses the target's "Inputs" and "Outputs" attributes and gathers up referenced items.
/// </summary>
/// <param name="bucket">Bucket providing the expander used for property/metadata expansion.</param>
/// <param name="itemVectorsInTargetInputs">Receives the input item vectors.</param>
/// <param name="itemVectorTransformsInTargetInputs">Receives the input item vectors that use transforms.</param>
/// <param name="discreteItemsInTargetInputs">Receives inputs that do not reference item vectors.</param>
/// <param name="itemVectorsInTargetOutputs">Receives the output item vectors (transforms are not separated out).</param>
/// <param name="discreteItemsInTargetOutputs">Receives outputs that do not reference item vectors.</param>
/// <param name="targetOutputItemSpecs">Receives the flattened list of all output item-specs.</param>
/// <owner>SumedhK</owner>
private void ParseTargetInputOutputSpecifications
(
    ItemBucket bucket,
    out Hashtable itemVectorsInTargetInputs,
    out Hashtable itemVectorTransformsInTargetInputs,
    out Hashtable discreteItemsInTargetInputs,
    out Hashtable itemVectorsInTargetOutputs,
    out Hashtable discreteItemsInTargetOutputs,
    out ArrayList targetOutputItemSpecs
)
{
    // Expand embedded properties and item metadata (but not item vectors)
    // before the specifications are broken down along the standard separator.
    Expander expander = new Expander(bucket.Expander, ExpanderOptions.ExpandPropertiesAndMetadata);

    List<string> inputSpecs = expander.ExpandAllIntoStringListLeaveEscaped(TargetInputSpecification, this.targetInputsAttribute);
    List<string> outputSpecs = expander.ExpandAllIntoStringListLeaveEscaped(TargetOutputSpecification, this.targetOutputsAttribute);

    itemVectorTransformsInTargetInputs = new Hashtable(StringComparer.OrdinalIgnoreCase);

    // Classify each input as: (1) an item vector, (2) an item vector with a
    // transform, or (3) a "discrete" item that references no item vectors.
    SeparateItemVectorsFromDiscreteItems(
        this.targetInputsAttribute,
        inputSpecs,
        bucket,
        out itemVectorsInTargetInputs,
        itemVectorTransformsInTargetInputs,
        out discreteItemsInTargetInputs);

    // Classify each output as: (1) an item vector (with or without a transform
    // -- transforms are NOT separated out here), or (2) a discrete item.
    SeparateItemVectorsFromDiscreteItems(
        this.targetOutputsAttribute,
        outputSpecs,
        bucket,
        out itemVectorsInTargetOutputs,
        null /* don't want transforms separated */,
        out discreteItemsInTargetOutputs);

    // Flatten every output item-spec into a single list.
    targetOutputItemSpecs = GetItemSpecsFromItemVectors(itemVectorsInTargetOutputs);
    targetOutputItemSpecs.AddRange(discreteItemsInTargetOutputs.Values);
}
/*********************************************************************************
 *
 * Helpers
 *
 *********************************************************************************/

/// <summary>
/// Convenience overload that forwards to the six-argument helper, passing
/// null for the final argument.
/// </summary>
private void InstantiateMockTaskHelper
(
    XmlElement taskNode,
    out TaskEngine taskEngine,
    out MockTask mockTask,
    out ItemBucket itemBucket,
    out EngineProxy engineProxy
)
{
    InstantiateMockTaskHelper(taskNode, out taskEngine, out mockTask, out itemBucket, out engineProxy, null);
}
/// <summary>
/// Constructs a token bucket object that can be compared against other
/// buckets. This dummy bucket is a patently invalid bucket, and cannot
/// be used for any other operations besides comparison.
/// </summary>
/// <remarks>
/// PERF NOTE: A dummy bucket is intentionally very light-weight, and it
/// allocates a minimum of memory compared to a real bucket.
/// </remarks>
/// <owner>SumedhK</owner>
/// <param name="itemMetadata">Metadata values the dummy bucket carries for comparison purposes.</param>
/// <returns>An item bucket that is invalid for everything except comparisons.</returns>
internal static ItemBucket GetDummyBucketForComparisons(Dictionary<string, string> itemMetadata)
{
    // Initialize only the expander; everything else stays unset, which keeps
    // this bucket cheap but valid solely for comparison purposes.
    ItemBucket comparisonBucket = new ItemBucket();
    comparisonBucket.expander = new Expander((ReadOnlyLookup)null, itemMetadata);

    return comparisonBucket;
}
/// <summary>
/// Uses the given task output specification to (statically) infer the task's outputs.
/// </summary>
/// <param name="lookup">Destination lookup that receives the inferred items/properties.</param>
/// <param name="taskOutputSpecification">Indicates whether the output is an item vector or a property.</param>
/// <param name="taskParameterName">Name of the task parameter attribute the output value is read from.</param>
/// <param name="itemName">can be null</param>
/// <param name="propertyName">can be null</param>
/// <param name="bucket">Bucket supplying the expander for this batch.</param>
private void InferTaskOutputs
(
    Lookup lookup,
    TaskOutput taskOutputSpecification,
    string taskParameterName,
    string itemName,
    string propertyName,
    ItemBucket bucket
)
{
    // if the task has a value set for the output parameter, expand all embedded properties and item metadata in it
    XmlAttribute taskParameterAttribute = null;

    // Lookup attribute name needs to be case-insensitive
    // DevDiv bugs: 33981
    foreach (XmlAttribute taskNodeAttribute in taskNode.Attributes)
    {
        if (String.Compare(taskNodeAttribute.Name, taskParameterName, StringComparison.OrdinalIgnoreCase) == 0)
        {
            taskParameterAttribute = taskNodeAttribute;
            break;
        }
    }

    if (taskParameterAttribute != null)
    {
        if (taskOutputSpecification.IsItemVector)
        {
            // This is an output item.
            ErrorUtilities.VerifyThrow((itemName != null) && (itemName.Length > 0), "Need item type.");

            // Expand only with properties first, so that expressions like Include="@(foo)" will transfer
            // the metadata of the "foo" items as well, not just their item specs.
            Expander propertyAndMetadataExpander = new Expander(bucket.Expander, ExpanderOptions.ExpandPropertiesAndMetadata);
            List<string> outputItemSpecs = propertyAndMetadataExpander.ExpandAllIntoStringListLeaveEscaped(taskParameterAttribute);

            foreach (string outputItemSpec in outputItemSpecs)
            {
                BuildItemGroup items = bucket.Expander.ExpandSingleItemListExpressionIntoItemsLeaveEscaped(outputItemSpec, taskParameterAttribute);

                // if the output item-spec is an item vector, get the items in it
                if (items != null)
                {
                    foreach (BuildItem item in items)
                    {
                        // we want to preserve the attributes on the item
                        BuildItem clonedItem = item.VirtualClone();

                        // but we do need to change the item type
                        clonedItem.Name = itemName;

                        lookup.AddNewItem(clonedItem);
                    }
                }
                else
                {
                    // if the output item-spec is not an item vector, accept it as-is
                    lookup.AddNewItem(new BuildItem(itemName, outputItemSpec));
                }
            }
        }
        else
        {
            // This is an output property.
            Debug.Assert(taskOutputSpecification.IsProperty);
            ErrorUtilities.VerifyThrow((propertyName != null) && (propertyName.Length > 0), "Need property name.");

            string taskParameterValue = bucket.Expander.ExpandAllIntoString(taskParameterAttribute);

            // An empty expansion means there is no property value to record.
            if (taskParameterValue.Length > 0)
            {
                lookup.SetProperty(new BuildProperty(propertyName, taskParameterValue, PropertyType.OutputProperty));
            }
        }
    }
}
/// <summary>
/// Execute a single bucket
/// </summary>
/// <param name="engineProxy">Proxy through which the task talks to the engine; its event context is updated per bucket.</param>
/// <param name="bucket">The batch of items to execute the task against.</param>
/// <param name="bucketNumber">Zero-based index of this bucket; non-zero buckets get a fresh task id.</param>
/// <param name="howToExecuteTask">Whether to actually execute the task or only infer its outputs.</param>
/// <returns>true if execution succeeded</returns>
private bool ExecuteBucket(EngineProxy engineProxy, ItemBucket bucket, int bucketNumber, TaskExecutionMode howToExecuteTask)
{
    if (
            (this.conditionAttribute != null)
            && !Utilities.EvaluateCondition(this.conditionAttribute.Value, this.conditionAttribute,
                    bucket.Expander, null, ParserOptions.AllowAll, loggingServices, buildEventContext)
       )
    {
        // Condition is false
        if (howToExecuteTask == TaskExecutionMode.ExecuteTaskAndGatherOutputs)
        {
            if (!loggingServices.OnlyLogCriticalEvents)
            {
                // Expand the expression for the Log.
                string expanded = bucket.Expander.ExpandAllIntoString(this.conditionAttribute);

                // Whilst we are within the processing of the task, we haven't actually started executing it, so
                // our skip task message needs to be in the context of the target. However any errors should be reported
                // at the point where the task appears in the project.
                BuildEventContext skipTaskContext = new BuildEventContext(buildEventContext.NodeId, buildEventContext.TargetId, buildEventContext.ProjectContextId, BuildEventContext.InvalidTaskId);

                loggingServices.LogComment(skipTaskContext,
                                    "TaskSkippedFalseCondition",
                                     TaskName, this.conditionAttribute.Value, expanded);
            }
        }

        // A skipped task is not a failure.
        return true;
    }

    bool taskExecutedSuccessfully = true;

    // Condition is true
    if (howToExecuteTask == TaskExecutionMode.ExecuteTaskAndGatherOutputs)
    {
        // Now that we know we will need to execute the task,
        // Ensure the TaskEngine is initialized with the task class
        // This does the work of task discovery, if it
        // hasn't already been done.
        bool taskClassWasFound = FindTask();

        if (!taskClassWasFound)
        {
            // Task wasn't discovered, we cannot continue
            return false;
        }

        // Now instantiate, initialize, and execute the task
        ITask task;

        // If this is the first bucket use the task context originally given to it, for the remaining buckets get a unique id for them
        if (bucketNumber != 0)
        {
            // Ask the parent engine the next Id which should be used for the taskId.
            buildEventContext = new BuildEventContext(buildEventContext.NodeId, buildEventContext.TargetId, buildEventContext.ProjectContextId, parentModule.GetNextTaskId());

            // For each batch the engineProxy needs to have the correct buildEventContext as all messages coming from a task will have the buildEventContext of the EngineProxy.
            engineProxy.BuildEventContext = buildEventContext;
        }

        loggingServices.LogTaskStarted(buildEventContext, TaskName, parentProjectFullFileName, projectFileOfTaskNode);

        AppDomain taskAppDomain = PrepareAppDomain();

        bool taskResult = false;

        try
        {
            task = InstantiateTask(taskAppDomain);

            // If task cannot be instantiated, we consider its declaration/usage to be invalid.
            ProjectErrorUtilities.VerifyThrowInvalidProject(task != null, taskNode, "TaskDeclarationOrUsageError", TaskName);

            taskExecutedSuccessfully = ExecuteInstantiatedTask(engineProxy, bucket, howToExecuteTask, task, out taskResult);

            // Report any property-override messages accumulated during execution.
            if (lookupHash != null)
            {
                List<string> overrideMessages = bucket.Lookup.GetPropertyOverrideMessages(lookupHash);
                if (overrideMessages != null)
                {
                    foreach (string s in overrideMessages)
                    {
                        loggingServices.LogCommentFromText(buildEventContext, MessageImportance.Low, s);
                    }
                }
            }
        }
        catch (InvalidProjectFileException e)
        {
            // Make sure the Invalid Project error gets logged *before* TaskFinished.  Otherwise,
            // the log is confusing.
            loggingServices.LogInvalidProjectFileError(buildEventContext, e);
            throw;
        }
        finally
        {
            // Flag the completion of the task.
            loggingServices.LogTaskFinished(
                buildEventContext,
                TaskName,
                parentProjectFullFileName,
                projectFileOfTaskNode,
                taskResult);

            task = null;

            // Unload the task's AppDomain (if one was created) even on failure.
            if (taskAppDomain != null)
            {
                AppDomain.Unload(taskAppDomain);
                taskAppDomain = null;
            }
        }
    }
    else
    {
        // Inference-only mode: statically gather the outputs without running the task.
        Debug.Assert(howToExecuteTask == TaskExecutionMode.InferOutputsOnly);

        ErrorUtilities.VerifyThrow(GatherTaskOutputs(howToExecuteTask, null, bucket),
            "The method GatherTaskOutputs() should never fail when inferring task outputs.");

        if (lookupHash != null)
        {
            List<string> overrideMessages = bucket.Lookup.GetPropertyOverrideMessages(lookupHash);
            if (overrideMessages != null)
            {
                foreach (string s in overrideMessages)
                {
                    loggingServices.LogCommentFromText(buildEventContext, MessageImportance.Low, s);
                }
            }
        }
    }

    return taskExecutedSuccessfully;
}
/// <summary>
/// Given an instantiated task, this helper method sets the specified vector parameter. Vector parameters can be composed
/// of multiple item vectors. The semicolon is the only separator allowed, and white space around the semicolon is
/// ignored. Any item separator strings are not allowed, and embedded item vectors are not allowed.
/// </summary>
/// <remarks>This method is marked "internal" for unit-testing purposes only -- it should be "private" ideally.</remarks>
/// <example>
/// If @(CPPFiles) is a vector for the files a.cpp and b.cpp, and @(IDLFiles) is a vector for the files a.idl and b.idl:
///
///     "@(CPPFiles)" converts to { a.cpp, b.cpp }
///
///     "@(CPPFiles); c.cpp; @(IDLFiles); c.idl" converts to { a.cpp, b.cpp, c.cpp, a.idl, b.idl, c.idl }
///
///     "@(CPPFiles,';')" converts to &lt;error&gt;
///
///     "xxx@(CPPFiles)xxx" converts to &lt;error&gt;
/// </example>
/// <returns>true, if successful</returns>
internal bool InitializeTaskVectorParameter
(
    ITask task,
    XmlAttribute taskParameterAttribute,
    bool isRequired,
    PropertyInfo parameter,
    Type parameterType,
    string parameterValue,
    ItemBucket bucket,
    out bool taskParameterSet
)
{
    ErrorUtilities.VerifyThrow(parameterValue != null, "Didn't expect null parameterValue in InitializeTaskVectorParameter");

    taskParameterSet = false;
    bool success = false;
    ArrayList finalTaskInputs = new ArrayList();
    List<TaskItem> finalTaskItems = bucket.Expander.ExpandAllIntoTaskItems(parameterValue, taskParameterAttribute);

    // Tracks the item currently being converted, so the catch blocks below can
    // name the offending item-spec in the error message.
    int i = 0;
    try
    {
        // If the task parameter is not a ITaskItem[], then we need to convert
        // all the TaskItem's in our arraylist to the appropriate datatype.
        if (parameterType != typeof(ITaskItem[]))
        {
            // Loop through all the TaskItems in our arraylist, and convert them.
            for (i = 0; i < finalTaskItems.Count; i++)
            {
                if (parameterType == typeof(string[]))
                {
                    finalTaskInputs.Add(finalTaskItems[i].ItemSpec);
                }
                else if (parameterType == typeof(bool[]))
                {
                    finalTaskInputs.Add(ConversionUtilities.ConvertStringToBool(finalTaskItems[i].ItemSpec));
                }
                else
                {
                    // All other element types go through the generic conversion,
                    // using the invariant culture for machine-readable values.
                    finalTaskInputs.Add(Convert.ChangeType(finalTaskItems[i].ItemSpec, parameterType.GetElementType(), CultureInfo.InvariantCulture));
                }
            }
        }
        else
        {
            finalTaskInputs.AddRange(finalTaskItems);
        }

        // If there were no items, don't change the parameter's value.  EXCEPT if it's marked as a required
        // parameter, in which case we made an explicit decision to pass in an empty array.  This is
        // to avoid project authors having to add Conditions on all their tasks to avoid calling them
        // when a particular item list is empty.  This way, we just call the task with an empty list,
        // the task will loop over an empty list, and return quickly.
        if ((finalTaskInputs.Count > 0) || (isRequired))
        {
            // Send the array into the task parameter.
            success = SetTaskParameter(task, parameter, finalTaskInputs.ToArray(parameterType.GetElementType()));
            taskParameterSet = true;
        }
        else
        {
            success = true;
        }
    }
    // Handle invalid type.
    catch (InvalidCastException)
    {
        ProjectErrorUtilities.VerifyThrowInvalidProject(false, taskParameterAttribute, "InvalidTaskParameterValueError", finalTaskItems[i].ItemSpec, parameter.Name, parameterType, TaskName);
    }
    // Handle argument exception (thrown by ConvertStringToBool)
    catch (ArgumentException)
    {
        ProjectErrorUtilities.VerifyThrowInvalidProject(false, taskParameterAttribute, "InvalidTaskParameterValueError", finalTaskItems[i].ItemSpec, parameter.Name, parameterType, TaskName);
    }
    // Handle bad string representation of a type.
    catch (FormatException)
    {
        ProjectErrorUtilities.VerifyThrowInvalidProject(false, taskParameterAttribute, "InvalidTaskParameterValueError", finalTaskItems[i].ItemSpec, parameter.Name, parameterType, TaskName);
    }
    // Handle overflow when converting string representation of a numerical type.
    catch (OverflowException)
    {
        ProjectErrorUtilities.VerifyThrowInvalidProject(false, taskParameterAttribute, "InvalidTaskParameterValueError", finalTaskItems[i].ItemSpec, parameter.Name, parameterType, TaskName);
    }

    return success;
}
/// <summary>
/// Given an instantiated task, this helper method sets the specified scalar parameter based on its type.
/// </summary>
/// <remarks>This is "internal" only for the purpose of unit testing. Otherwise, it should be "private".</remarks>
/// <returns>true, if successful</returns>
internal bool InitializeTaskScalarParameter
(
    ITask task,
    XmlAttribute taskParameterAttribute,
    PropertyInfo parameter,
    Type parameterType,
    string parameterValue,
    ItemBucket bucket,
    out bool taskParameterSet
)
{
    taskParameterSet = false;

    bool success = false;

    try
    {
        if (parameterType == typeof(ITaskItem))
        {
            // We don't know how many items we're going to end up with, but we'll
            // keep adding them to this arraylist as we find them.
            List<TaskItem> finalTaskItems = bucket.Expander.ExpandAllIntoTaskItems(parameterValue, taskParameterAttribute);

            if (finalTaskItems.Count == 0)
            {
                // No items at all: leave the parameter untouched and succeed.
                success = true;
            }
            else
            {
                if (finalTaskItems.Count != 1)
                {
                    // We only allow a single item to be passed into a parameter of ITaskItem.
                    // Some of the computation (expansion) here is expensive, so don't make the above
                    // "if" statement directly part of the first param to VerifyThrowInvalidProject.
                    ProjectErrorUtilities.VerifyThrowInvalidProject(false,
                        taskParameterAttribute,
                        "CannotPassMultipleItemsIntoScalarParameter",
                        bucket.Expander.ExpandAllIntoString(parameterValue, taskParameterAttribute),
                        parameter.Name,
                        parameterType,
                        TaskName);
                }

                success = SetTaskParameter(task, parameter, (ITaskItem)finalTaskItems[0]);
                taskParameterSet = true;
            }
        }
        else
        {
            // Expand out all the metadata, properties, and item vectors in the string.
            string expandedParameterValue = bucket.Expander.ExpandAllIntoString(parameterValue, taskParameterAttribute);

            if (expandedParameterValue.Length == 0)
            {
                // An empty expansion leaves the parameter untouched.
                success = true;
            }
            // Convert the string to the appropriate datatype, and set the task's parameter.
            else if (parameterType == typeof(bool))
            {
                success = SetTaskParameter(task, parameter, ConversionUtilities.ConvertStringToBool(expandedParameterValue));
                taskParameterSet = true;
            }
            else if (parameterType == typeof(string))
            {
                success = SetTaskParameter(task, parameter, expandedParameterValue);
                taskParameterSet = true;
            }
            else
            {
                // Generic conversion path; invariant culture for machine-readable values.
                success = SetTaskParameter(task, parameter, Convert.ChangeType(expandedParameterValue, parameterType, CultureInfo.InvariantCulture));
                taskParameterSet = true;
            }
        }
    }
    // handle invalid type
    catch (InvalidCastException)
    {
        ProjectErrorUtilities.VerifyThrowInvalidProject(false, taskParameterAttribute, "InvalidTaskParameterValueError", bucket.Expander.ExpandAllIntoString(parameterValue, taskParameterAttribute), parameter.Name, parameterType, TaskName);
    }
    // handle argument exception (thrown by ConvertStringToBool)
    catch (ArgumentException)
    {
        ProjectErrorUtilities.VerifyThrowInvalidProject(false, taskParameterAttribute, "InvalidTaskParameterValueError", bucket.Expander.ExpandAllIntoString(parameterValue, taskParameterAttribute), parameter.Name, parameterType, TaskName);
    }
    // handle bad string representation of a type
    catch (FormatException)
    {
        ProjectErrorUtilities.VerifyThrowInvalidProject(false, taskParameterAttribute, "InvalidTaskParameterValueError", bucket.Expander.ExpandAllIntoString(parameterValue, taskParameterAttribute), parameter.Name, parameterType, TaskName);
    }
    // handle overflow when converting string representation of a numerical type
    catch (OverflowException)
    {
        ProjectErrorUtilities.VerifyThrowInvalidProject(false, taskParameterAttribute, "InvalidTaskParameterValueError", bucket.Expander.ExpandAllIntoString(parameterValue, taskParameterAttribute), parameter.Name, parameterType, TaskName);
    }

    return success;
}
/// <summary>
/// Given an instantiated task, this helper method sets the specified parameter based on its type.
/// </summary>
/// <returns>true, if successful</returns>
private bool InitializeTaskParameter
(
    ITask task,
    XmlAttribute taskParameterAttribute,
    bool isRequired,
    ItemBucket bucket,
    out bool taskParameterSet
)
{
    bool success = false;
    taskParameterSet = false;

    try
    {
        string parameterName = taskParameterAttribute.Name;
        string parameterValue = taskParameterAttribute.Value;

        try
        {
            // check if the task has a .NET property corresponding to the parameter
            PropertyInfo parameter = TaskClass.GetProperty(parameterName);

            if (parameter != null)
            {
                // A read-only property cannot serve as a task input.
                ProjectErrorUtilities.VerifyThrowInvalidProject(parameter.CanWrite, taskParameterAttribute,
                    "SetAccessorNotAvailableOnTaskParameter", parameterName, TaskName);

                Type parameterType = parameter.PropertyType;

                // try to set the parameter
                if (parameterType.IsValueType ||
                    (parameterType == typeof(string)) ||
                    (parameterType == typeof(ITaskItem)))
                {
                    // Scalar: a value type, a string, or a single ITaskItem.
                    success = InitializeTaskScalarParameter(task,
                        taskParameterAttribute,
                        parameter,
                        parameterType,
                        parameterValue,
                        bucket,
                        out taskParameterSet);
                }
                else if ((parameterType.IsArray && parameterType.GetElementType().IsValueType) ||
                    (parameterType == typeof(string[])) ||
                    (parameterType == typeof(ITaskItem[])))
                {
                    // Vector: an array of value types, strings, or ITaskItems.
                    success = InitializeTaskVectorParameter(task,
                        taskParameterAttribute,
                        isRequired,
                        parameter,
                        parameterType,
                        parameterValue,
                        bucket,
                        out taskParameterSet);
                }
                else
                {
                    // Any other parameter type is unsupported.
                    loggingServices.LogError(buildEventContext, Utilities.CreateBuildEventFileInfo(taskParameterAttribute, projectFileOfTaskNode),
                        "UnsupportedTaskParameterTypeError", parameterType, parameter.Name, TaskName);
                }

                if (!success)
                {
                    // flag an error if the parameter could not be set
                    loggingServices.LogError(buildEventContext, Utilities.CreateBuildEventFileInfo(taskParameterAttribute, projectFileOfTaskNode),
                        "InvalidTaskAttributeError", parameterName, parameterValue, TaskName);
                }
            }
            else
            {
                // flag an error if we find a parameter that has no .NET property equivalent
                loggingServices.LogError(buildEventContext, Utilities.CreateBuildEventFileInfo(taskParameterAttribute, projectFileOfTaskNode),
                    "UnexpectedTaskAttribute", parameterName, TaskName);
            }
        }
        catch (AmbiguousMatchException)
        {
            // More than one property matched the name (case-insensitively).
            loggingServices.LogError(buildEventContext, Utilities.CreateBuildEventFileInfo(taskParameterAttribute, projectFileOfTaskNode),
                "AmbiguousTaskParameterError", TaskName, parameterName);
        }
    }
    catch (Exception e) // Catching Exception, but rethrowing unless it's a well-known exception.
    {
        if (ExceptionHandling.NotExpectedReflectionException(e))
            throw;

        // Reflection related exception
        loggingServices.LogError(buildEventContext, CreateBuildEventFileInfoForTask(),
            "TaskParametersError", TaskName, e.Message);

        success = false;
    }

    return success;
}
/// <summary>
/// Partitions the items consumed by the batchable object into buckets, where each bucket contains a set of items that
/// have the same value set on all item metadata consumed by the object.
/// </summary>
/// <remarks>
/// PERF NOTE: Given n items and m batching metadata that produce l buckets, it is usually the case that n > l > m,
/// because a batchable object typically uses one or two item metadata to control batching, and only has a handful of
/// buckets. The number of buckets is typically only large if a batchable object is using single-item batching
/// (where l == n). Any algorithm devised for bucketing therefore, should try to minimize n and l in its complexity
/// equation. The algorithm below has a complexity of O(n*lg(l)*m/2) in its comparisons, and is effectively O(n) when
/// l is small, and O(n*lg(n)) in the worst case as l -> n. However, note that the comparison complexity is not the
/// same as the operational complexity for this algorithm. The operational complexity of this algorithm is actually
/// O(n*m + n*lg(l)*m/2 + n*l/2 + n + l), which is effectively O(n^2) in the worst case. The additional complexity comes
/// from the array and metadata operations that are performed. However, those operations are extremely cheap compared
/// to the comparison operations, which dominate the time spent in this method.
/// </remarks>
/// <returns>ArrayList containing ItemBucket objects (can be empty), each one representing an execution batch.</returns>
private static ArrayList BucketConsumedItems
(
    XmlNode parentNode,
    Lookup lookup,
    Hashtable itemListsToBeBatched,
    Dictionary<string, MetadataReference> consumedMetadataReferences
)
{
    ErrorUtilities.VerifyThrow(itemListsToBeBatched.Count > 0, "Need item types consumed by the batchable object.");
    ErrorUtilities.VerifyThrow(consumedMetadataReferences.Count > 0, "Need item metadata consumed by the batchable object.");

    // Buckets are kept sorted by their metadata values so membership can be probed via binary search.
    ArrayList buckets = new ArrayList();

    // Walk the list of item names that we're supposed to batch on.
    foreach (DictionaryEntry entry in itemListsToBeBatched)
    {
        string itemName = (string)entry.Key;

        // Reuse the previously-fetched item group when one was stashed in the table;
        // otherwise consult the lookup.
        BuildItemGroup items = entry.Value as BuildItemGroup;
        if (items == null)
        {
            items = lookup.GetItems(itemName);
        }

        if (items == null)
        {
            continue;
        }

        foreach (BuildItem item in items)
        {
            // Gather this item's values for all the metadata consumed by the batchable object.
            Dictionary<string, string> itemMetadataValues = GetItemMetadataValues(parentNode, item, consumedMetadataReferences);

            // A light-weight bucket carrying just the metadata, used purely as a search key
            // against the previously-created buckets.
            ItemBucket dummyBucket = ItemBucket.GetDummyBucketForComparisons(itemMetadataValues);

            int matchingBucketIndex = buckets.BinarySearch(dummyBucket);

            ItemBucket matchingBucket;
            if (matchingBucketIndex >= 0)
            {
                // An existing bucket has the same values as this item for all consumed metadata.
                matchingBucket = (ItemBucket)buckets[matchingBucketIndex];
            }
            else
            {
                // No match: create a new bucket and insert it at the sorted position indicated
                // by the binary search. NOTE: the ~ (bitwise complement) of the negative return
                // value is the insertion index -- see the ArrayList.BinarySearch documentation.
                matchingBucket = new ItemBucket(itemListsToBeBatched.Keys, itemMetadataValues, lookup, buckets.Count);
                buckets.Insert(~matchingBucketIndex, matchingBucket);
            }

            // Add this item to the (found or freshly created) bucket.
            matchingBucket.AddItem(item);
        }
    }

    // Restore discovery order, so that the first item declared in the project file
    // ends up in the first batch passed into the target/task.
    ArrayList orderedBuckets = ArrayList.Repeat(null, buckets.Count);

    foreach (ItemBucket bucket in buckets)
    {
        orderedBuckets[bucket.BucketSequenceNumber] = bucket;
    }

    return orderedBuckets;
}
/// <summary> /// Given an instantiated task, this method initializes it, and sets all the task parameters (which are defined as /// properties of the task class). /// </summary> /// <remarks> /// This method is internal for unit-testing purposes only. /// </remarks> /// <returns>true, if successful</returns> internal bool InitializeTask(ITask task, ItemBucket bucket, EngineProxy engineProxy) { try { task.BuildEngine = engineProxy; task.HostObject = hostObject; } // if a logger has failed, abort immediately catch (LoggerException) { // Polite logger failure throw; } catch (InternalLoggerException) { // Logger threw arbitrary exception throw; } // handle any exception thrown by the task during initialization catch (Exception e) { // NOTE: We catch ALL exceptions here, to attempt to completely isolate the Engine // from failures in the task. Probably we should try to avoid catching truly fatal exceptions, // e.g., StackOverflowException loggingServices.LogFatalTaskError(buildEventContext, e, // Display the task's exception stack. // Log the task line number, whatever the value of ContinueOnError; // because InitializeTask failure will be a hard error anyway. CreateBuildEventFileInfoForTask(), TaskName); return false; } bool taskInitialized = InitializeTaskParameters(task, bucket); return taskInitialized; }
/// <summary>
/// Creates an instance of a MockTask, and returns the objects necessary to exercise
/// taskEngine.InitializeTask
/// </summary>
/// <param name="taskNode">XML element representing the task invocation.</param>
/// <param name="taskEngine">[out] TaskEngine wired to an in-memory project.</param>
/// <param name="mockTask">[out] the instantiated MockTask.</param>
/// <param name="itemBucket">[out] bucket over a small canned item table (see below).</param>
/// <param name="engineProxy">[out] proxy connecting the task to the engine.</param>
/// <param name="condition">NOTE(review): not referenced anywhere in this body -- confirm whether it is vestigial.</param>
/// <owner>RGoel</owner>
private void InstantiateMockTaskHelper
(
    XmlElement taskNode,
    out TaskEngine taskEngine,
    out MockTask mockTask,
    out ItemBucket itemBucket,
    out EngineProxy engineProxy,
    string condition
)
{
    // Load the MockTask type from this test assembly.
    LoadedType taskClass = new LoadedType(typeof(MockTask), new AssemblyLoadInfo(typeof(MockTask).Assembly.FullName, null));

    // Stand up a minimal single-proc engine/project environment for the task to run in.
    Engine engine = new Engine(@"c:\");
    Project project = new Project(engine);
    EngineCallback engineCallback = new EngineCallback(engine);
    TaskExecutionModule taskExecutionModule = new TaskExecutionModule(engineCallback, TaskExecutionModule.TaskExecutionModuleMode.SingleProcMode, false);
    ProjectBuildState buildContext = new ProjectBuildState(null, null, new BuildEventContext(0, 1, 1, 1));

    // Register a task context with the engine so the TaskEngine/EngineProxy below can refer to it.
    int nodeProxyID = engineCallback.CreateTaskContext(project, null, buildContext, taskNode,
        EngineCallback.inProcNode,
        new BuildEventContext(BuildEventContext.InvalidNodeId, BuildEventContext.InvalidTargetId, BuildEventContext.InvalidProjectContextId, BuildEventContext.InvalidTaskId));

    taskEngine = new TaskEngine
        (
            taskNode,
            null, /* host object */
            "In Memory",
            project.FullFileName,
            engine.LoggingServices,
            nodeProxyID,
            taskExecutionModule,
            new BuildEventContext(0, 1, 1, 1)
        );

    taskEngine.TaskClass = taskClass;

    engineProxy = new EngineProxy(taskExecutionModule, nodeProxyID, project.FullFileName, project.FullFileName, engine.LoggingServices, null);

    // NOTE(review): the MockTask receives a second, distinct EngineProxy rather than reusing
    // the one assigned to the out parameter above -- confirm this is intentional.
    mockTask = new MockTask(new EngineProxy(taskExecutionModule, nodeProxyID, project.FullFileName, project.FullFileName, engine.LoggingServices, null));

    // The code below creates an item table that is equivalent to the following MSBuild syntax:
    //
    //      <ItemGroup>
    //          <ItemListContainingOneItem Include="a.cs">
    //              <Culture>fr-fr</Culture>
    //          </ItemListContainingOneItem>
    //
    //          <ItemListContainingTwoItems Include="b.cs">
    //              <HintPath>c:\foo</HintPath>
    //          </ItemListContainingTwoItems>
    //          <ItemListContainingTwoItems Include="c.cs">
    //              <HintPath>c:\bar</HintPath>
    //          </ItemListContainingTwoItems>
    //      </ItemGroup>
    //
    Hashtable itemsByName = new Hashtable(StringComparer.OrdinalIgnoreCase);

    BuildItemGroup itemListContainingOneItem = new BuildItemGroup();
    BuildItem a = itemListContainingOneItem.AddNewItem("ItemListContainingOneItem", "a.cs");
    a.SetMetadata("Culture", "fr-fr");
    itemsByName["ItemListContainingOneItem"] = itemListContainingOneItem;

    BuildItemGroup itemListContainingTwoItems = new BuildItemGroup();
    BuildItem b = itemListContainingTwoItems.AddNewItem("ItemListContainingTwoItems", "b.cs");
    b.SetMetadata("HintPath", "c:\\foo");
    BuildItem c = itemListContainingTwoItems.AddNewItem("ItemListContainingTwoItems", "c.cs");
    c.SetMetadata("HintPath", "c:\\bar");
    itemsByName["ItemListContainingTwoItems"] = itemListContainingTwoItems;

    // An empty bucket (no batching metadata) whose lookup exposes the item table above.
    itemBucket = new ItemBucket(new string[0], new Dictionary<string, string>(), LookupHelpers.CreateLookup(itemsByName), 0);
}
/// <summary> /// Sets all the task parameters, using the provided bucket's lookup. /// </summary> private bool InitializeTaskParameters(ITask task, ItemBucket bucket) { bool taskInitialized = true; // Get the properties that exist on this task. We need to gather all of the ones that are marked // "required" so that we can keep track of whether or not they all get set. Dictionary<string, string> setParameters = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase); Dictionary<string, string> requiredParameters = GetNamesOfPropertiesWithRequiredAttribute(); // look through all the attributes of the task element foreach (XmlAttribute taskAttribute in taskNode.Attributes) { // skip the known "special" task attributes if (!XMakeAttributes.IsSpecialTaskAttribute(taskAttribute.Name)) { bool taskParameterSet = false; // Did we actually call the setter on this task parameter? bool success = InitializeTaskParameter(task, taskAttribute, requiredParameters.ContainsKey(taskAttribute.Name), bucket, out taskParameterSet); if (!success) { // stop processing any more attributes taskInitialized = false; break; } else if (taskParameterSet) { // Keep track that we've set a value for this property. Note that this will // keep track of non-required properties as well, but that's okay. We just // to check at the end that there are no values in the requiredParameters // table that aren't also in the setParameters table. setParameters[taskAttribute.Name] = String.Empty; } } } if (taskInitialized) { // See if any required properties were not set foreach (KeyValuePair<string, string> requiredParameter in requiredParameters) { ProjectErrorUtilities.VerifyThrowInvalidProject(setParameters.ContainsKey(requiredParameter.Key), taskNode, "RequiredPropertyNotSetError", TaskName, requiredParameter.Key); } } return taskInitialized; }
/// <summary>
/// Execute a task object for a given bucket.
/// </summary>
/// <param name="engineProxy">Proxy through which the task talks to the engine.</param>
/// <param name="bucket">Batching bucket supplying the task's item/property context.</param>
/// <param name="howToExecuteTask">Whether to really execute and gather outputs, or only infer outputs.</param>
/// <param name="task">The instantiated task object.</param>
/// <param name="taskResult">Whether the task returned true from Execute</param>
/// <returns>true if task executed successfully (possibly failed but continueOnError=true)</returns>
private bool ExecuteInstantiatedTask(EngineProxy engineProxy, ItemBucket bucket, TaskExecutionMode howToExecuteTask, ITask task, out bool taskResult)
{
    // Re-evaluate ContinueOnError against this bucket before running the task.
    UpdateContinueOnError(bucket, engineProxy);

    taskResult = false;
    bool taskExecutedSuccessfully = true;

    if (!InitializeTask(task, bucket, engineProxy))
    {
        // The task cannot be initialized; this raises an invalid-project error (throws).
        ProjectErrorUtilities.VerifyThrowInvalidProject(false, taskNode, "TaskParametersError", TaskName, String.Empty);
    }
    else
    {
        // Tracks whether Execute() actually returned (as opposed to throwing);
        // outputs are only gathered when it did.
        bool taskReturned = false;

        try
        {
            taskResult = task.Execute();
            taskReturned = true;
        }
        // if a logger has failed, abort immediately
        catch (LoggerException)
        {
            // Polite logger failure
            throw;
        }
        catch (InternalLoggerException)
        {
            // Logger threw arbitrary exception
            throw;
        }
        // handle any exception thrown by the task during execution
        // NOTE: We catch ALL exceptions here, to attempt to completely isolate the Engine
        // from failures in the task. Probably we should try to avoid catching truly fatal exceptions,
        // e.g., StackOverflowException
        catch (Exception e)
        {
            if (continueOnError)
            {
                loggingServices.LogTaskWarningFromException(buildEventContext, e,
                    // Don't try and log the line/column number for this error if
                    // ContinueOnError=true, because it's too expensive to do so,
                    // and this error may be fairly common and expected.
                    new BuildEventFileInfo(projectFileOfTaskNode), TaskName);

                // Log a message explaining why we converted the previous error into a warning.
                loggingServices.LogComment(buildEventContext, MessageImportance.Normal, "ErrorConvertedIntoWarning");
            }
            else
            {
                loggingServices.LogFatalTaskError(buildEventContext, e, CreateBuildEventFileInfoForTask(), TaskName);
            }
        }

        // If the task returned, attempt to gather its outputs. If gathering outputs fails,
        // force taskResult to false.
        if (taskReturned)
        {
            taskResult = GatherTaskOutputs(howToExecuteTask, task, bucket) && taskResult;
        }

        // If the taskResult is false, look at ContinueOnError. If ContinueOnError=false (default)
        // mark taskExecutedSuccessfully=false. Otherwise let the task succeed but log a normal
        // priority message that says this task is continuing because ContinueOnError=true.
        if (!taskResult)
        {
            if (!continueOnError)
            {
                taskExecutedSuccessfully = false;
            }
            else
            {
                loggingServices.LogComment(buildEventContext, MessageImportance.Normal,
                    "TaskContinuedDueToContinueOnError",
                    "ContinueOnError", TaskName, "true");
            }
        }
    }

    return taskExecutedSuccessfully;
}
/// <summary>
/// Gathers task outputs in two ways:
/// 1) Given an instantiated task that has finished executing, it extracts the outputs using .NET reflection.
/// 2) Otherwise, it parses the task's output specifications and (statically) infers the outputs.
/// </summary>
/// <param name="howToExecuteTask">Whether outputs are gathered by reflection or inferred.</param>
/// <param name="task">The (possibly executed) task object.</param>
/// <param name="bucket">Batching bucket supplying the expander and lookup.</param>
/// <returns>true, if successful</returns>
private bool GatherTaskOutputs(TaskExecutionMode howToExecuteTask, ITask task, ItemBucket bucket)
{
    bool gatheredTaskOutputsSuccessfully = true;

    foreach (TaskOutput taskOutputSpecification in GetTaskOutputSpecifications(true))
    {
        // Only gather this output if it is unconditional, or its Condition evaluates to true.
        bool shouldGatherOutput =
            (taskOutputSpecification.ConditionAttribute == null) ||
            Utilities.EvaluateCondition(taskOutputSpecification.ConditionAttribute.Value,
                taskOutputSpecification.ConditionAttribute, bucket.Expander, null, ParserOptions.AllowAll,
                loggingServices, buildEventContext);

        if (!shouldGatherOutput)
        {
            continue;
        }

        ErrorUtilities.VerifyThrow(taskOutputSpecification.TaskParameterAttribute != null,
            "Invalid task output specification -- this should have been caught when the <Output> XML was parsed.");
        ErrorUtilities.VerifyThrow(taskOutputSpecification.TaskParameterAttribute.Value.Length > 0,
            "Invalid task output specification -- this should have been caught when the <Output> XML was parsed.");

        // Expand all embedded properties, item metadata and item vectors in the task parameter name.
        string taskParameterName = bucket.Expander.ExpandAllIntoString(taskOutputSpecification.TaskParameterAttribute);

        ProjectErrorUtilities.VerifyThrowInvalidProject(taskParameterName.Length > 0,
            taskOutputSpecification.TaskParameterAttribute,
            "InvalidEvaluatedAttributeValue", taskParameterName,
            taskOutputSpecification.TaskParameterAttribute.Value, XMakeAttributes.taskParameter, XMakeElements.output);

        // The outputs are routed either into an item vector, or into a property.
        string itemName = null;
        string propertyName = null;

        if (taskOutputSpecification.IsItemVector)
        {
            ErrorUtilities.VerifyThrow(taskOutputSpecification.ItemNameAttribute != null,
                "How can it be an output item if the item name is null? This should have been caught when the <Output> XML was parsed.");
            ErrorUtilities.VerifyThrow(taskOutputSpecification.ItemNameAttribute.Value.Length > 0,
                "Invalid task output specification -- this should have been caught when the <Output> XML was parsed.");

            // Expand all embedded properties, item metadata and item vectors in the item type name.
            itemName = bucket.Expander.ExpandAllIntoString(taskOutputSpecification.ItemNameAttribute);

            ProjectErrorUtilities.VerifyThrowInvalidProject(itemName.Length > 0,
                taskOutputSpecification.ItemNameAttribute,
                "InvalidEvaluatedAttributeValue", itemName,
                taskOutputSpecification.ItemNameAttribute.Value, XMakeAttributes.itemName, XMakeElements.output);
        }
        else
        {
            ErrorUtilities.VerifyThrow(taskOutputSpecification.IsProperty,
                "Invalid task output specification -- this should have been caught when the <Output> XML was parsed.");
            ErrorUtilities.VerifyThrow(taskOutputSpecification.PropertyNameAttribute != null,
                "How can it be an output property if the property name is null? This should have been caught when the <Output> XML was parsed.");
            ErrorUtilities.VerifyThrow(taskOutputSpecification.PropertyNameAttribute.Value.Length > 0,
                "Invalid task output specification -- this should have been caught when the <Output> XML was parsed.");

            // Expand all embedded properties, item metadata and item vectors in the property name.
            propertyName = bucket.Expander.ExpandAllIntoString(taskOutputSpecification.PropertyNameAttribute);

            ProjectErrorUtilities.VerifyThrowInvalidProject(propertyName.Length > 0,
                taskOutputSpecification.PropertyNameAttribute,
                "InvalidEvaluatedAttributeValue", propertyName,
                taskOutputSpecification.PropertyNameAttribute.Value, XMakeAttributes.propertyName, XMakeElements.output);
        }

        if (howToExecuteTask == TaskExecutionMode.ExecuteTaskAndGatherOutputs)
        {
            // Gather the outputs from the executed task by .NET reflection.
            gatheredTaskOutputsSuccessfully = GatherGeneratedTaskOutputs(bucket.Lookup, taskOutputSpecification, taskParameterName, itemName, propertyName, task);
        }
        else
        {
            // Statically infer the outputs based on information in the task and <Output> tags.
            Debug.Assert(howToExecuteTask == TaskExecutionMode.InferOutputsOnly);
            InferTaskOutputs(bucket.Lookup, taskOutputSpecification, taskParameterName, itemName, propertyName, bucket);
        }

        if (!gatheredTaskOutputsSuccessfully)
        {
            break;
        }
    }

    return gatheredTaskOutputsSuccessfully;
}
/// <summary>
/// Compares the target's inputs against its outputs to determine if the target needs to be built/rebuilt/skipped.
/// </summary>
/// <remarks>
/// The collections of changed and up-to-date inputs returned from this method are valid IFF this method decides an
/// incremental build is needed.
/// </remarks>
/// <owner>SumedhK</owner>
/// <param name="bucket">Batching bucket used to evaluate the target's Inputs/Outputs specifications.</param>
/// <param name="changedTargetInputs">[out] inputs that are out-of-date w.r.t. their outputs (valid only for IncrementalBuild).</param>
/// <param name="upToDateTargetInputs">[out] inputs whose outputs are current (valid only for IncrementalBuild).</param>
/// <returns>
/// DependencyAnalysisResult.SkipUpToDate, if target is up-to-date;
/// DependencyAnalysisResult.SkipNoInputs, if target has no inputs;
/// DependencyAnalysisResult.SkipNoOutputs, if target has no outputs;
/// DependencyAnalysisResult.IncrementalBuild, if only some target outputs are out-of-date;
/// DependencyAnalysisResult.FullBuild, if target is out-of-date
/// </returns>
internal DependencyAnalysisResult PerformDependencyAnalysis
(
    ItemBucket bucket,
    out Hashtable changedTargetInputs,
    out Hashtable upToDateTargetInputs
)
{
    // Clear any old dependency analysis logging details
    dependencyAnalysisDetail.Clear();
    uniqueTargetInputs.Clear();
    uniqueTargetOutputs.Clear();

    // Declaring Inputs without Outputs is an authoring error.
    ProjectErrorUtilities.VerifyThrowInvalidProject((TargetOutputSpecification.Length > 0) || (TargetInputSpecification.Length == 0),
        this.TargetToAnalyze.TargetElement, "TargetInputsSpecifiedWithoutOutputs", TargetToAnalyze.Name);

    DependencyAnalysisResult result = DependencyAnalysisResult.SkipUpToDate;

    changedTargetInputs = null;
    upToDateTargetInputs = null;

    if (TargetOutputSpecification.Length == 0)
    {
        // if the target has no output specification, we always build it
        result = DependencyAnalysisResult.FullBuild;
    }
    else
    {
        Hashtable itemVectorsInTargetInputs;
        Hashtable itemVectorTransformsInTargetInputs;
        Hashtable discreteItemsInTargetInputs;

        Hashtable itemVectorsInTargetOutputs;
        Hashtable discreteItemsInTargetOutputs;
        ArrayList targetOutputItemSpecs;

        // Partition the evaluated Inputs/Outputs into item vectors, transforms and discrete items.
        ParseTargetInputOutputSpecifications(bucket,
            out itemVectorsInTargetInputs,
            out itemVectorTransformsInTargetInputs,
            out discreteItemsInTargetInputs,
            out itemVectorsInTargetOutputs,
            out discreteItemsInTargetOutputs,
            out targetOutputItemSpecs);

        ArrayList itemVectorsReferencedInBothTargetInputsAndOutputs;
        ArrayList itemVectorsReferencedOnlyInTargetInputs;
        ArrayList itemVectorsReferencedOnlyInTargetOutputs = null;

        // if the target has no outputs because the output specification evaluated to empty
        if (targetOutputItemSpecs.Count == 0)
        {
            result = PerformDependencyAnalysisIfNoOutputs();
        }
        // if there are no discrete output items...
        else if (discreteItemsInTargetOutputs.Count == 0)
        {
            // try to correlate inputs and outputs by checking:
            // 1) which item vectors are referenced by both input and output items
            // 2) which item vectors are referenced only by input items
            // 3) which item vectors are referenced only by output items
            // NOTE: two item vector transforms cannot be correlated, even if they reference the same item vector, because
            // depending on the transform expression, there might be no relation between the results of the transforms; as
            // a result, input items that are item vector transforms are treated as discrete items
            DiffHashtables(itemVectorsInTargetInputs, itemVectorsInTargetOutputs,
                out itemVectorsReferencedInBothTargetInputsAndOutputs,
                out itemVectorsReferencedOnlyInTargetInputs,
                out itemVectorsReferencedOnlyInTargetOutputs);

            // if there are no item vectors only referenced by output items...
            // NOTE: we consider output items that reference item vectors not referenced by any input item, as discrete
            // items, since we cannot correlate them to any input items
            if (itemVectorsReferencedOnlyInTargetOutputs.Count == 0)
            {
                /*
                 * At this point, we know the following:
                 * 1) the target has outputs
                 * 2) the target has NO discrete outputs
                 *
                 * This implies:
                 * 1) the target only references vectors (incl. transforms) in its outputs
                 * 2) all vectors referenced in the outputs are also referenced in the inputs
                 * 3) the referenced vectors are not empty
                 *
                 * We can thus conclude: the target MUST have (non-discrete) inputs
                 *
                 */
                ErrorUtilities.VerifyThrow(itemVectorsReferencedInBothTargetInputsAndOutputs.Count > 0, "The target must have inputs.");
                Debug.Assert(GetItemSpecsFromItemVectors(itemVectorsInTargetInputs).Count > 0, "The target must have inputs.");

                result = PerformDependencyAnalysisIfDiscreteInputs(itemVectorsInTargetInputs,
                            itemVectorTransformsInTargetInputs, discreteItemsInTargetInputs,
                            itemVectorsReferencedOnlyInTargetInputs, targetOutputItemSpecs);

                if (result != DependencyAnalysisResult.FullBuild)
                {
                    // once the inputs and outputs have been correlated, we can do a 1-to-1 comparison between each input
                    // and its corresponding output, to discover which inputs have changed, and which are up-to-date...
                    result = PerformDependencyAnalysisIfCorrelatedInputsOutputs(itemVectorsInTargetInputs, itemVectorsInTargetOutputs,
                        itemVectorsReferencedInBothTargetInputsAndOutputs,
                        out changedTargetInputs, out upToDateTargetInputs);
                }
            }
        }

        // if there are any discrete items in the target outputs, then we have no obvious correlation to the inputs they
        // depend on, since any input can contribute to a discrete output, so we compare all inputs against all outputs
        // NOTE: output items are considered discrete, if
        // 1) they do not reference any item vector
        // 2) they reference item vectors that are not referenced by any input item
        if ((discreteItemsInTargetOutputs.Count > 0) ||
            ((itemVectorsReferencedOnlyInTargetOutputs != null) && (itemVectorsReferencedOnlyInTargetOutputs.Count > 0)))
        {
            result = PerformDependencyAnalysisIfDiscreteOutputs(
                        itemVectorsInTargetInputs, itemVectorTransformsInTargetInputs,
                        discreteItemsInTargetInputs, targetOutputItemSpecs);
        }

        if (result == DependencyAnalysisResult.SkipUpToDate)
        {
            loggingService.LogComment(buildEventContext, MessageImportance.Normal,
                "SkipTargetBecauseOutputsUpToDate",
                TargetToAnalyze.Name);

            // Log the target inputs & outputs
            if (!loggingService.OnlyLogCriticalEvents)
            {
                string inputs = null;
                string outputs = null;
                // Extract the unique inputs and outputs gathered during target-level dependency analysis (TLDA)
                ExtractUniqueInputsAndOutputs(out inputs, out outputs);

                if (inputs != null)
                {
                    loggingService.LogComment(buildEventContext, MessageImportance.Low, "SkipTargetUpToDateInputs", inputs);
                }

                if (outputs != null)
                {
                    loggingService.LogComment(buildEventContext, MessageImportance.Low, "SkipTargetUpToDateOutputs", outputs);
                }
            }
        }
    }

    LogReasonForBuildingTarget(result);

    return result;
}
private DependencyAnalysisResult PerformDependencyAnalysisTestHelper ( FileWriteInfo [] filesToAnalyze, Hashtable itemsByName, string inputs, string outputs, out Hashtable changedTargetInputs, out Hashtable upToDateTargetInputs ) { List<string> filesToDelete = new List<string>(); try { // first set the disk up for (int i = 0; i < filesToAnalyze.Length; ++i) { string path = ObjectModelHelpers.CreateFileInTempProjectDirectory(filesToAnalyze[i].Path, ""); File.SetLastWriteTime(path, filesToAnalyze[i].LastWriteTime); filesToDelete.Add(path); } // now create the project string unformattedProjectXml = @" <Project ToolsVersion=`3.5` xmlns=`msbuildnamespace`> <Target Name=`Build` Inputs=`{0}` Outputs=`{1}`> </Target> </Project> "; Project p = ObjectModelHelpers.CreateInMemoryProject(String.Format(unformattedProjectXml, inputs, outputs)); // now do the dependency analysis ItemBucket itemBucket = new ItemBucket(null, null, LookupHelpers.CreateLookup(itemsByName), 0); TargetDependencyAnalyzer analyzer = new TargetDependencyAnalyzer(ObjectModelHelpers.TempProjectDir, p.Targets["Build"], p.ParentEngine.LoggingServices, (BuildEventContext)null); return analyzer.PerformDependencyAnalysis(itemBucket, out changedTargetInputs, out upToDateTargetInputs); } finally { // finally clean up foreach (string path in filesToDelete) { if (File.Exists(path)) File.Delete(path); } } }
/// <summary>
/// Separates item vectors from discrete items, and discards duplicates. If requested, item vector transforms are also
/// separated out. The item vectors (and the transforms) are partitioned by type, since there can be more than one item
/// vector of the same type.
/// </summary>
/// <remarks>
/// The item vector collection is a Hashtable of Hashtables, where the top-level Hashtable is indexed by item type, and
/// each "partition" Hashtable is indexed by the item vector itself.
/// </remarks>
/// <owner>SumedhK</owner>
/// <param name="attributeContainingItems">The XML attribute which we're operating on here.
/// The sole purpose of passing in this parameter is to be able to provide line/column number
/// information in the event there's an error.</param>
/// <param name="items">Raw item specifications to classify.</param>
/// <param name="bucket">Bucket whose expander evaluates item vector expressions.</param>
/// <param name="itemVectors">[out] item vectors, partitioned by item type.</param>
/// <param name="itemVectorTransforms">Receives transform vectors when non-null; when null, transforms stay in itemVectors.</param>
/// <param name="discreteItems">[out] the non-vector items, keyed by themselves.</param>
private void SeparateItemVectorsFromDiscreteItems
(
    XmlAttribute attributeContainingItems,
    List<string> items,
    ItemBucket bucket,
    out Hashtable itemVectors,
    Hashtable itemVectorTransforms,
    out Hashtable discreteItems
)
{
    itemVectors = new Hashtable(StringComparer.OrdinalIgnoreCase);
    discreteItems = new Hashtable(StringComparer.OrdinalIgnoreCase);

    foreach (string item in items)
    {
        Match itemVectorMatch;
        BuildItemGroup itemVectorContents = bucket.Expander.ExpandSingleItemListExpressionIntoItemsLeaveEscaped(item, attributeContainingItems, out itemVectorMatch);

        if (itemVectorContents == null)
        {
            // Not an item vector expression: record it as a discrete item, keyed by itself
            // (which also discards duplicates).
            discreteItems[item] = item;
            continue;
        }

        // When the caller asked for transforms to be separated out, route vectors that carry a
        // transform specification into the transforms collection; everything else goes into the
        // plain item vector collection.
        bool isTransform = (itemVectorTransforms != null) && (itemVectorMatch.Groups["TRANSFORM_SPECIFICATION"].Length > 0);
        Hashtable destinationCollection = isTransform ? itemVectorTransforms : itemVectors;

        // Partition by item type, since several vectors can reference the same type.
        string itemVectorType = itemVectorMatch.Groups["TYPE"].Value;
        Hashtable partition = (Hashtable)destinationCollection[itemVectorType];

        if (partition == null)
        {
            partition = new Hashtable(StringComparer.OrdinalIgnoreCase);
            destinationCollection[itemVectorType] = partition;
        }

        partition[item] = itemVectorContents;

        ErrorUtilities.VerifyThrow((itemVectorTransforms == null) || (destinationCollection.Equals(itemVectorTransforms)) || (partition.Count == 1),
            "If transforms have been separated out, there should only be one item vector per partition.");
    }
}
/// <summary>
/// Partitions the items consumed by the batchable object into buckets, where each bucket contains a set of items that
/// have the same value set on all item metadata consumed by the object.
/// </summary>
/// <remarks>
/// PERF NOTE: Given n items and m batching metadata that produce l buckets, it is usually the case that n > l > m,
/// because a batchable object typically uses one or two item metadata to control batching, and only has a handful of
/// buckets. The number of buckets is typically only large if a batchable object is using single-item batching
/// (where l == n). Any algorithm devised for bucketing therefore, should try to minimize n and l in its complexity
/// equation. The algorithm below has a complexity of O(n*lg(l)*m/2) in its comparisons, and is effectively O(n) when
/// l is small, and O(n*lg(n)) in the worst case as l -> n. However, note that the comparison complexity is not the
/// same as the operational complexity for this algorithm. The operational complexity of this algorithm is actually
/// O(n*m + n*lg(l)*m/2 + n*l/2 + n + l), which is effectively O(n^2) in the worst case. The additional complexity comes
/// from the array and metadata operations that are performed. However, those operations are extremely cheap compared
/// to the comparison operations, which dominate the time spent in this method.
/// </remarks>
/// <returns>ArrayList containing ItemBucket objects (can be empty), each one representing an execution batch.</returns>
private static ArrayList BucketConsumedItems
(
    XmlNode parentNode,
    Lookup lookup,
    Hashtable itemListsToBeBatched,
    Dictionary<string, MetadataReference> consumedMetadataReferences
)
{
    ErrorUtilities.VerifyThrow(itemListsToBeBatched.Count > 0, "Need item types consumed by the batchable object.");
    ErrorUtilities.VerifyThrow(consumedMetadataReferences.Count > 0, "Need item metadata consumed by the batchable object.");

    // Buckets are kept sorted by their metadata values so membership can be probed via binary search.
    ArrayList buckets = new ArrayList();

    // Walk the list of item names that we're supposed to batch on.
    foreach (DictionaryEntry entry in itemListsToBeBatched)
    {
        string itemName = (string)entry.Key;

        // Reuse the previously-fetched item group when one was stashed in the table;
        // otherwise consult the lookup.
        BuildItemGroup items = entry.Value as BuildItemGroup;
        if (items == null)
        {
            items = lookup.GetItems(itemName);
        }

        if (items == null)
        {
            continue;
        }

        foreach (BuildItem item in items)
        {
            // Gather this item's values for all the metadata consumed by the batchable object.
            Dictionary<string, string> itemMetadataValues = GetItemMetadataValues(parentNode, item, consumedMetadataReferences);

            // A light-weight bucket carrying just the metadata, used purely as a search key
            // against the previously-created buckets.
            ItemBucket dummyBucket = ItemBucket.GetDummyBucketForComparisons(itemMetadataValues);

            int matchingBucketIndex = buckets.BinarySearch(dummyBucket);

            ItemBucket matchingBucket;
            if (matchingBucketIndex >= 0)
            {
                // An existing bucket has the same values as this item for all consumed metadata.
                matchingBucket = (ItemBucket)buckets[matchingBucketIndex];
            }
            else
            {
                // No match: create a new bucket and insert it at the sorted position indicated
                // by the binary search. NOTE: the ~ (bitwise complement) of the negative return
                // value is the insertion index -- see the ArrayList.BinarySearch documentation.
                matchingBucket = new ItemBucket(itemListsToBeBatched.Keys, itemMetadataValues, lookup, buckets.Count);
                buckets.Insert(~matchingBucketIndex, matchingBucket);
            }

            // Add this item to the (found or freshly created) bucket.
            matchingBucket.AddItem(item);
        }
    }

    // Restore discovery order, so that the first item declared in the project file
    // ends up in the first batch passed into the target/task.
    ArrayList orderedBuckets = ArrayList.Repeat(null, buckets.Count);

    foreach (ItemBucket bucket in buckets)
    {
        orderedBuckets[bucket.BucketSequenceNumber] = bucket;
    }

    return orderedBuckets;
}