private void ExecuteModify(ProjectItemGroupTaskItemInstance child, ItemBucket bucket, ISet <string> keepMetadata, ISet <string> removeMetadata)
        {
            ICollection <ProjectItemInstance> group = bucket.Lookup.GetItems(child.ItemType);

            if (group == null || group.Count == 0)
            {
                // No items of this type to modify
                return;
            }

            // Figure out what metadata names and values we need to set
            var metadataToSet = new Lookup.MetadataModifications(keepMetadata != null);

            // Filter the metadata as appropriate
            if (keepMetadata != null)
            {
                foreach (var metadataName in keepMetadata)
                {
                    metadataToSet[metadataName] = Lookup.MetadataModification.CreateFromNoChange();
                }
            }
            else if (removeMetadata != null)
            {
                foreach (var metadataName in removeMetadata)
                {
                    metadataToSet[metadataName] = Lookup.MetadataModification.CreateFromRemove();
                }
            }

            foreach (ProjectItemGroupTaskMetadataInstance metadataInstance in child.Metadata)
            {
                bool condition = ConditionEvaluator.EvaluateCondition
                                 (
                    metadataInstance.Condition,
                    ParserOptions.AllowAll,
                    bucket.Expander,
                    ExpanderOptions.ExpandAll,
                    Project.Directory,
                    metadataInstance.ConditionLocation,
                    LoggingContext.LoggingService,
                    LoggingContext.BuildEventContext,
                    FileSystems.Default);

                if (condition)
                {
                    string evaluatedValue = bucket.Expander.ExpandIntoStringLeaveEscaped(metadataInstance.Value, ExpanderOptions.ExpandAll, metadataInstance.Location);
                    metadataToSet[metadataInstance.Name] = Lookup.MetadataModification.CreateFromNewValue(evaluatedValue);
                }
            }

            // Now apply the changes.  This must be done after filtering, since explicitly set metadata overrides filters.
            bucket.Lookup.ModifyItems(child.ItemType, group, metadataToSet);
        }
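
        // Illustrative sketch (not part of the product code): why "explicitly set metadata overrides
        // filters" falls out of the ordering above -- the keep/remove filter entries are written into
        // metadataToSet first, and the loop over child.Metadata then overwrites any matching entry.
        // A plain dictionary stands in for Lookup.MetadataModifications; all names and values are made up.
        private static void MetadataPrecedenceSketch()
        {
            var modifications = new Dictionary<string, string>();

            // Filter pass (analogous to CreateFromRemove above): mark metadata for removal.
            modifications["Link"] = null;
            modifications["CopyToOutputDirectory"] = null;

            // Explicit-metadata pass runs afterwards, so its value replaces the earlier removal entry.
            modifications["Link"] = @"Generated\%(Filename).cs";

            // "Link" now carries the explicit value; "CopyToOutputDirectory" is still marked removed.
        }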
Example #2
        /// <summary>
        /// Retrieves the error targets for this target.
        /// </summary>
        /// <param name="projectLoggingContext">The project logging context.</param>
        /// <returns>A list of error targets.</returns>
        internal List <TargetSpecification> GetErrorTargets(ProjectLoggingContext projectLoggingContext)
        {
            VerifyState(_state, TargetEntryState.ErrorExecution);
            ErrorUtilities.VerifyThrow(_legacyCallTargetScopes == null, "We should have already left any legacy call target scopes.");

            List <TargetSpecification> allErrorTargets = new List <TargetSpecification>(_target.OnErrorChildren.Count);

            foreach (ProjectOnErrorInstance errorTargetInstance in _target.OnErrorChildren)
            {
                bool condition = ConditionEvaluator.EvaluateCondition
                                 (
                    errorTargetInstance.Condition,
                    ParserOptions.AllowPropertiesAndItemLists,
                    _expander,
                    ExpanderOptions.ExpandPropertiesAndItems,
                    _requestEntry.ProjectRootDirectory,
                    errorTargetInstance.ConditionLocation,
                    projectLoggingContext.LoggingService,
                    projectLoggingContext.BuildEventContext
                                 );

                if (condition)
                {
                    IList <string> errorTargets = _expander.ExpandIntoStringListLeaveEscaped(errorTargetInstance.ExecuteTargets, ExpanderOptions.ExpandPropertiesAndItems, errorTargetInstance.ExecuteTargetsLocation);

                    foreach (string escapedErrorTarget in errorTargets)
                    {
                        string errorTargetName = EscapingUtilities.UnescapeAll(escapedErrorTarget);
                        allErrorTargets.Add(new TargetSpecification(errorTargetName, errorTargetInstance.ExecuteTargetsLocation));
                    }
                }
            }

            // If this target never executed (for instance, because one of its dependencies errored) then we need to
            // create a result for this target to report when it gets to the Completed state.
            if (null == _targetResult)
            {
                _targetResult = new TargetResult(new TaskItem[] { }, new WorkUnitResult(WorkUnitResultCode.Failed, WorkUnitActionCode.Stop, null));
            }

            _state = TargetEntryState.Completed;

            return(allErrorTargets);
        }
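
        // Illustrative note (not from the source): the element this method walks sits inside a <Target>
        // and looks like the line below; each <OnError> whose Condition evaluates to true contributes the
        // semicolon-separated names in ExecuteTargets to the returned list. Target names are made up.
        //
        //   <OnError ExecuteTargets="CollectLogs;NotifyFailure" Condition="'$(CI)' == 'true'" />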
Example #3
        /// <summary>
        /// Runs all of the tasks for this target, batched as necessary.
        /// </summary>
        internal async Task ExecuteTarget(ITaskBuilder taskBuilder, BuildRequestEntry requestEntry, ProjectLoggingContext projectLoggingContext, CancellationToken cancellationToken)
        {
#if MSBUILDENABLEVSPROFILING
            try
            {
                string beginTargetBuild = String.Format(CultureInfo.CurrentCulture, "Build Target {0} in Project {1} - Start", this.Name, projectFullPath);
                DataCollection.CommentMarkProfile(8800, beginTargetBuild);
#endif

            try
            {
                VerifyState(_state, TargetEntryState.Execution);
                ErrorUtilities.VerifyThrow(!_isExecuting, "Target {0} is already executing", _target.Name);
                _cancellationToken = cancellationToken;
                _isExecuting       = true;

                // Generate the batching buckets.  Note that each bucket will get a lookup based on the baseLookup.  This lookup will be in its
                // own scope, which we will collapse back down into the baseLookup at the bottom of the function.
                List <ItemBucket> buckets = BatchingEngine.PrepareBatchingBuckets(GetBatchableParametersForTarget(), _baseLookup, _target.Location);

                WorkUnitResult       aggregateResult      = new WorkUnitResult();
                TargetLoggingContext targetLoggingContext = null;
                bool   targetSuccess   = false;
                int    numberOfBuckets = buckets.Count;
                string projectFullPath = requestEntry.RequestConfiguration.ProjectFullPath;

                string parentTargetName = null;
                if (ParentEntry != null && ParentEntry.Target != null)
                {
                    parentTargetName = ParentEntry.Target.Name;
                }

                for (int i = 0; i < numberOfBuckets; i++)
                {
                    ItemBucket bucket = buckets[i];

                    // If one of the buckets failed, stop building.
                    if (aggregateResult.ActionCode == WorkUnitActionCode.Stop)
                    {
                        break;
                    }

                    targetLoggingContext = projectLoggingContext.LogTargetBatchStarted(projectFullPath, _target, parentTargetName);
                    WorkUnitResult bucketResult = null;
                    targetSuccess = false;

                    Lookup.Scope entryForInference = null;
                    Lookup.Scope entryForExecution = null;

                    try
                    {
                        // This isn't really dependency analysis.  This is up-to-date checking.  Based on this we will be able to determine if we should
                        // run tasks in inference or execution mode (or both) or just skip them altogether.
                        ItemDictionary <ProjectItemInstance> changedTargetInputs;
                        ItemDictionary <ProjectItemInstance> upToDateTargetInputs;
                        Lookup lookupForInference;
                        Lookup lookupForExecution;

                        // UNDONE: (Refactor) Refactor TargetUpToDateChecker to take a logging context, not a logging service.
                        TargetUpToDateChecker    dependencyAnalyzer = new TargetUpToDateChecker(requestEntry.RequestConfiguration.Project, _target, targetLoggingContext.LoggingService, targetLoggingContext.BuildEventContext);
                        DependencyAnalysisResult dependencyResult   = dependencyAnalyzer.PerformDependencyAnalysis(bucket, out changedTargetInputs, out upToDateTargetInputs);

                        switch (dependencyResult)
                        {
                        // UNDONE: Need to enter/leave debugger scope properly for the <Target> element.
                        case DependencyAnalysisResult.FullBuild:
                        case DependencyAnalysisResult.IncrementalBuild:
                        case DependencyAnalysisResult.SkipUpToDate:
                            // Create the lookups used to hold the current set of properties and items
                            lookupForInference = bucket.Lookup;
                            lookupForExecution = bucket.Lookup.Clone();

                            // Push the lookup stack up one so that we are only modifying items and properties in that scope.
                            entryForInference = lookupForInference.EnterScope("ExecuteTarget() Inference");
                            entryForExecution = lookupForExecution.EnterScope("ExecuteTarget() Execution");

                            // if we're doing an incremental build, we need to effectively run the task twice -- once
                            // to infer the outputs for up-to-date input items, and once to actually execute the task;
                            // as a result we need separate sets of item and property collections to track changes
                            if (dependencyResult == DependencyAnalysisResult.IncrementalBuild)
                            {
                                // subset the relevant items to those that are up-to-date
                                foreach (string itemType in upToDateTargetInputs.ItemTypes)
                                {
                                    lookupForInference.PopulateWithItems(itemType, upToDateTargetInputs[itemType]);
                                }

                                // subset the relevant items to those that have changed
                                foreach (string itemType in changedTargetInputs.ItemTypes)
                                {
                                    lookupForExecution.PopulateWithItems(itemType, changedTargetInputs[itemType]);
                                }
                            }

                            // We either have some work to do or at least we need to infer outputs from inputs.
                            bucketResult = await ProcessBucket(taskBuilder, targetLoggingContext, GetTaskExecutionMode(dependencyResult), lookupForInference, lookupForExecution);

                            // Now aggregate the result with the existing known results.  There are four rules, assuming the target was not
                            // skipped due to being up-to-date:
                            // 1. If this bucket failed or was cancelled, the aggregate result is failure.
                            // 2. If this bucket Succeeded and we have not previously failed, the aggregate result is a success.
                            // 3. Otherwise, the bucket was skipped, which has no effect on the aggregate result.
                            // 4. If the bucket's action code says to stop, then we stop, regardless of the success or failure state.
                            if (dependencyResult != DependencyAnalysisResult.SkipUpToDate)
                            {
                                aggregateResult = aggregateResult.AggregateResult(bucketResult);
                            }
                            else
                            {
                                if (aggregateResult.ResultCode == WorkUnitResultCode.Skipped)
                                {
                                    aggregateResult = aggregateResult.AggregateResult(new WorkUnitResult(WorkUnitResultCode.Success, WorkUnitActionCode.Continue, null));
                                }
                            }

                            // Pop the lookup scopes, causing them to collapse their values back down into the
                            // bucket's lookup.
                            // NOTE: this order is important because when we infer outputs, we are trying
                            // to produce the same results as would be produced from a full build; as such
                            // if we're doing both the infer and execute steps, we want the outputs from
                            // the execute step to override the outputs of the infer step -- this models
                            // the full build scenario more correctly than if the steps were reversed
                            entryForInference.LeaveScope();
                            entryForInference = null;
                            entryForExecution.LeaveScope();
                            entryForExecution = null;
                            targetSuccess     = (bucketResult != null) && (bucketResult.ResultCode == WorkUnitResultCode.Success);
                            break;

                        case DependencyAnalysisResult.SkipNoInputs:
                        case DependencyAnalysisResult.SkipNoOutputs:
                            // We have either no inputs or no outputs, so there is nothing to do.
                            targetSuccess = true;
                            break;
                        }
                    }
                    catch (InvalidProjectFileException e)
                    {
                        // Make sure the Invalid Project error gets logged *before* TargetFinished.  Otherwise,
                        // the log is confusing.
                        targetLoggingContext.LogInvalidProjectFileError(e);

                        if (null != entryForInference)
                        {
                            entryForInference.LeaveScope();
                        }

                        if (null != entryForExecution)
                        {
                            entryForExecution.LeaveScope();
                        }

                        aggregateResult = aggregateResult.AggregateResult(new WorkUnitResult(WorkUnitResultCode.Failed, WorkUnitActionCode.Stop, null));
                    }
                    finally
                    {
                        // Don't log the last target finished event until we can process the target outputs as we want to attach them to the
                        // last target batch.
                        if (targetLoggingContext != null && i < numberOfBuckets - 1)
                        {
                            targetLoggingContext.LogTargetBatchFinished(projectFullPath, targetSuccess, null);
                            targetLoggingContext = null;
                        }
                    }
                }

                // Produce the final results.
                List <TaskItem> targetOutputItems = new List <TaskItem>();

                try
                {
                    // If any legacy CallTarget operations took place, integrate them back in to the main lookup now.
                    LeaveLegacyCallTargetScopes();

                    // Publish the items for each bucket back into the baseLookup.  Note that EnterScope() was actually called on each
                    // bucket inside of the ItemBucket constructor, which is why you don't see it anywhere around here.
                    foreach (ItemBucket bucket in buckets)
                    {
                        bucket.LeaveScope();
                    }

                    string          targetReturns         = _target.Returns;
                    ElementLocation targetReturnsLocation = _target.ReturnsLocation;

                    // If there are no targets in the project file that use the "Returns" attribute, that means that we
                    // revert to the legacy behavior in the case where Returns is not specified (null, rather
                    // than the empty string, which indicates no returns).  Legacy behavior is for all
                    // of the target's Outputs to be returned.
                    // On the other hand, if there is at least one target in the file that uses the Returns attribute,
                    // then all targets in the file are run according to the new behaviour (return nothing unless otherwise
                    // specified by the Returns attribute).
                    if (targetReturns == null)
                    {
                        if (!_target.ParentProjectSupportsReturnsAttribute)
                        {
                            targetReturns         = _target.Outputs;
                            targetReturnsLocation = _target.OutputsLocation;
                        }
                    }

                    if (!String.IsNullOrEmpty(targetReturns))
                    {
                        // Determine if we should keep duplicates.
                        bool keepDupes = ConditionEvaluator.EvaluateCondition
                                         (
                            _target.KeepDuplicateOutputs,
                            ParserOptions.AllowPropertiesAndItemLists,
                            _expander,
                            ExpanderOptions.ExpandPropertiesAndItems,
                            requestEntry.ProjectRootDirectory,
                            _target.KeepDuplicateOutputsLocation,
                            projectLoggingContext.LoggingService,
                            projectLoggingContext.BuildEventContext
                                         );

                        // NOTE: we need to gather the outputs in batches, because the output specification may reference item metadata
                        // Also, we are using the baseLookup, which has possibly had changes made to it since the project started.  Because of this, the
                        // set of outputs calculated here may differ from those which would have been calculated at the beginning of the target.  It is
                        // assumed the user intended this.
                        List <ItemBucket> batchingBuckets = BatchingEngine.PrepareBatchingBuckets(GetBatchableParametersForTarget(), _baseLookup, _target.Location);

                        if (keepDupes)
                        {
                            foreach (ItemBucket bucket in batchingBuckets)
                            {
                                targetOutputItems.AddRange(bucket.Expander.ExpandIntoTaskItemsLeaveEscaped(targetReturns, ExpanderOptions.ExpandAll, targetReturnsLocation));
                            }
                        }
                        else
                        {
                            HashSet <TaskItem> addedItems = new HashSet <TaskItem>();
                            foreach (ItemBucket bucket in batchingBuckets)
                            {
                                IList <TaskItem> itemsToAdd = bucket.Expander.ExpandIntoTaskItemsLeaveEscaped(targetReturns, ExpanderOptions.ExpandAll, targetReturnsLocation);

                                foreach (TaskItem item in itemsToAdd)
                                {
                                    if (!addedItems.Contains(item))
                                    {
                                        targetOutputItems.Add(item);
                                        addedItems.Add(item);
                                    }
                                }
                            }
                        }
                    }
                }
                finally
                {
                    if (targetLoggingContext != null)
                    {
                        // log the last target finished since we now have the target outputs.
                        targetLoggingContext.LogTargetBatchFinished(projectFullPath, targetSuccess, targetOutputItems != null && targetOutputItems.Count > 0 ? targetOutputItems : null);
                    }
                }

                _targetResult = new TargetResult(targetOutputItems.ToArray(), aggregateResult);

                if (aggregateResult.ResultCode == WorkUnitResultCode.Failed && aggregateResult.ActionCode == WorkUnitActionCode.Stop)
                {
                    _state = TargetEntryState.ErrorExecution;
                }
                else
                {
                    _state = TargetEntryState.Completed;
                }
            }
            finally
            {
                _isExecuting = false;
            }
#if MSBUILDENABLEVSPROFILING
        }

        finally
        {
            string endTargetBuild = String.Format(CultureInfo.CurrentCulture, "Build Target {0} in Project {1} - End", this.Name, projectFullPath);
            DataCollection.CommentMarkProfile(8801, endTargetBuild);
        }
#endif
        }
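
        // Illustrative sketch (not the real WorkUnitResult.AggregateResult): the four aggregation
        // rules spelled out in the comments above, written over stand-in enums so the precedence is
        // easy to follow. The enum members mirror WorkUnitResultCode / WorkUnitActionCode.
        private enum SketchResultCode { Skipped, Success, Failed, Canceled }
        private enum SketchActionCode { Continue, Stop }

        private static void AggregateSketch(
            ref SketchResultCode aggregateResult, ref SketchActionCode aggregateAction,
            SketchResultCode bucketResult, SketchActionCode bucketAction)
        {
            if (bucketResult == SketchResultCode.Failed || bucketResult == SketchResultCode.Canceled)
            {
                aggregateResult = SketchResultCode.Failed;      // rule 1: any failed/cancelled bucket fails the target
            }
            else if (bucketResult == SketchResultCode.Success && aggregateResult != SketchResultCode.Failed)
            {
                aggregateResult = SketchResultCode.Success;     // rule 2: success only if nothing failed earlier
            }
            // rule 3: a skipped bucket leaves the aggregate result unchanged

            if (bucketAction == SketchActionCode.Stop)
            {
                aggregateAction = SketchActionCode.Stop;        // rule 4: a stop request wins regardless of result
            }
        }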
Example #4
        /// <summary>
        /// Retrieves the list of dependencies this target needs to have built and moves the target to the next state.
        /// Never returns null.
        /// </summary>
        /// <returns>A collection of targets on which this target depends.</returns>
        internal List <TargetSpecification> GetDependencies(ProjectLoggingContext projectLoggingContext)
        {
            VerifyState(_state, TargetEntryState.Dependencies);

            // Resolve the target now, since from this point on we are going to be doing work with the actual instance.
            GetTargetInstance();

            // We first make sure no batching was attempted with the target's condition.
            // UNDONE: (Improvement) We want to allow this actually.  In order to do this we need to determine what the
            // batching buckets are, and if there are any which aren't empty, return our list of dependencies.
            // Only in the case where all bucket conditions fail do we want to skip the target entirely (and
            // thus skip building the dependencies.)
            if (ExpressionShredder.ContainsMetadataExpressionOutsideTransform(_target.Condition))
            {
                ProjectErrorUtilities.ThrowInvalidProject(_target.ConditionLocation, "TargetConditionHasInvalidMetadataReference", _target.Name, _target.Condition);
            }

            // If condition is false (based on propertyBag), set this target's state to
            // "Skipped" since we won't actually build it.
            bool condition = ConditionEvaluator.EvaluateCondition
                             (
                _target.Condition,
                ParserOptions.AllowPropertiesAndItemLists,
                _expander,
                ExpanderOptions.ExpandPropertiesAndItems,
                _requestEntry.ProjectRootDirectory,
                _target.ConditionLocation,
                projectLoggingContext.LoggingService,
                projectLoggingContext.BuildEventContext
                             );

            if (!condition)
            {
                _targetResult = new TargetResult(new TaskItem[0], new WorkUnitResult(WorkUnitResultCode.Skipped, WorkUnitActionCode.Continue, null));
                _state        = TargetEntryState.Completed;

                if (!projectLoggingContext.LoggingService.OnlyLogCriticalEvents)
                {
                    // Expand the expression for the Log.
                    string expanded = _expander.ExpandIntoStringAndUnescape(_target.Condition, ExpanderOptions.ExpandPropertiesAndItems, _target.ConditionLocation);

                    // By design: Not building dependencies. This is what NAnt does too.
                    // NOTE: In the original code, this was logged from the target logging context.  However, the target
                    // hadn't been "started" by then, so you'd get a target message outside the context of a started
                    // target.  In the Task builder (and original Task Engine), a Task Skipped message would be logged in
                    // the context of the target, not the task.  This should be the same, especially given that we
                    // wish to allow batching on the condition of a target.
                    projectLoggingContext.LogComment(MessageImportance.Low, "TargetSkippedFalseCondition", _target.Name, _target.Condition, expanded);
                }

                return(new List <TargetSpecification>());
            }

            IList <string>             dependencies      = _expander.ExpandIntoStringListLeaveEscaped(_target.DependsOnTargets, ExpanderOptions.ExpandPropertiesAndItems, _target.DependsOnTargetsLocation);
            List <TargetSpecification> dependencyTargets = new List <TargetSpecification>(dependencies.Count);

            foreach (string escapedDependency in dependencies)
            {
                string dependencyTargetName = EscapingUtilities.UnescapeAll(escapedDependency);
                dependencyTargets.Add(new TargetSpecification(dependencyTargetName, _target.DependsOnTargetsLocation));
            }

            _state = TargetEntryState.Execution;

            return(dependencyTargets);
        }
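
        // Illustrative sketch (not EscapingUtilities itself): why both GetErrorTargets and
        // GetDependencies expand with the *LeaveEscaped variants and only unescape per entry.
        // Splitting while MSBuild escape sequences (e.g. "%3B" for ';') are still in place keeps an
        // escaped separator inside one value from producing extra entries; unescaping afterwards
        // restores the literal character. The value below is made up for illustration.
        private static void SplitThenUnescapeSketch()
        {
            string escapedList = "Alpha;Beta%3BGamma";                   // two entries, not three

            string[] entries = escapedList.Split(';');                   // ["Alpha", "Beta%3BGamma"]

            for (int i = 0; i < entries.Length; i++)
            {
                entries[i] = entries[i].Replace("%3B", ";");             // stand-in for UnescapeAll
            }
            // entries is now ["Alpha", "Beta;Gamma"]
        }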
        internal override void ExecuteTask(Lookup lookup)
        {
            foreach (ProjectItemGroupTaskItemInstance child in _taskInstance.Items)
            {
                List <ItemBucket> buckets = null;

                try
                {
                    List <string> parameterValues = new List <string>();
                    GetBatchableValuesFromBuildItemGroupChild(parameterValues, child);
                    buckets = BatchingEngine.PrepareBatchingBuckets(parameterValues, lookup, child.ItemType, _taskInstance.Location);

                    // "Execute" each bucket
                    foreach (ItemBucket bucket in buckets)
                    {
                        bool condition = ConditionEvaluator.EvaluateCondition
                                         (
                            child.Condition,
                            ParserOptions.AllowAll,
                            bucket.Expander,
                            ExpanderOptions.ExpandAll,
                            Project.Directory,
                            child.ConditionLocation,
                            LoggingContext.LoggingService,
                            LoggingContext.BuildEventContext,
                            FileSystems.Default);

                        if (condition)
                        {
                            HashSet <string>       keepMetadata           = null;
                            HashSet <string>       removeMetadata         = null;
                            HashSet <string>       matchOnMetadata        = null;
                            MatchOnMetadataOptions matchOnMetadataOptions = MatchOnMetadataConstants.MatchOnMetadataOptionsDefaultValue;

                            if (!String.IsNullOrEmpty(child.KeepMetadata))
                            {
                                var keepMetadataEvaluated = bucket.Expander.ExpandIntoStringListLeaveEscaped(child.KeepMetadata, ExpanderOptions.ExpandAll, child.KeepMetadataLocation).ToList();
                                if (keepMetadataEvaluated.Count > 0)
                                {
                                    keepMetadata = new HashSet <string>(keepMetadataEvaluated);
                                }
                            }

                            if (!String.IsNullOrEmpty(child.RemoveMetadata))
                            {
                                var removeMetadataEvaluated = bucket.Expander.ExpandIntoStringListLeaveEscaped(child.RemoveMetadata, ExpanderOptions.ExpandAll, child.RemoveMetadataLocation).ToList();
                                if (removeMetadataEvaluated.Count > 0)
                                {
                                    removeMetadata = new HashSet <string>(removeMetadataEvaluated);
                                }
                            }

                            if (!String.IsNullOrEmpty(child.MatchOnMetadata))
                            {
                                var matchOnMetadataEvaluated = bucket.Expander.ExpandIntoStringListLeaveEscaped(child.MatchOnMetadata, ExpanderOptions.ExpandAll, child.MatchOnMetadataLocation).ToList();
                                if (matchOnMetadataEvaluated.Count > 0)
                                {
                                    matchOnMetadata = new HashSet <string>(matchOnMetadataEvaluated);
                                }

                                Enum.TryParse(child.MatchOnMetadataOptions, out matchOnMetadataOptions);
                            }

                            if ((child.Include.Length != 0) ||
                                (child.Exclude.Length != 0))
                            {
                                // It's an item -- we're "adding" items to the world
                                ExecuteAdd(child, bucket, keepMetadata, removeMetadata);
                            }
                            else if (child.Remove.Length != 0)
                            {
                                // It's a remove -- we're "removing" items from the world
                                ExecuteRemove(child, bucket, matchOnMetadata, matchOnMetadataOptions);
                            }
                            else
                            {
                                // It's a modify -- changing existing items
                                ExecuteModify(child, bucket, keepMetadata, removeMetadata);
                            }
                        }
                    }
                }
                finally
                {
                    if (buckets != null)
                    {
                        // Propagate the item changes to the bucket above
                        foreach (ItemBucket bucket in buckets)
                        {
                            bucket.LeaveScope();
                        }
                    }
                }
            }
        }
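
        // Illustrative note (not from the source): the three shapes of an item element under an
        // <ItemGroup> inside a <Target>, and the branch above that handles each one (item type and
        // values are made up):
        //
        //   <Compile Include="Extra.cs" />   -> ExecuteAdd     (Include and/or Exclude present)
        //   <Compile Remove="Legacy.cs" />   -> ExecuteRemove  (Remove present)
        //   <Compile Visible="false" />      -> ExecuteModify  (neither; updates metadata on existing items)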
        private void ExecuteAdd(ProjectItemGroupTaskItemInstance child, ItemBucket bucket, ISet <string> keepMetadata, ISet <string> removeMetadata)
        {
            // First, collect up the appropriate metadata collections.  We need the one from the item definition, if any, and
            // the one we are using for this batching bucket.
            ProjectItemDefinitionInstance itemDefinition;

            Project.ItemDefinitions.TryGetValue(child.ItemType, out itemDefinition);

            // The NestedMetadataTable will handle the aggregation of the different metadata collections
            NestedMetadataTable metadataTable         = new NestedMetadataTable(child.ItemType, bucket.Expander.Metadata, itemDefinition);
            IMetadataTable      originalMetadataTable = bucket.Expander.Metadata;

            bucket.Expander.Metadata = metadataTable;

            // Second, expand the item include and exclude, and filter existing metadata as appropriate.
            List <ProjectItemInstance> itemsToAdd = ExpandItemIntoItems(child, bucket.Expander, keepMetadata, removeMetadata);

            // Third, expand the metadata.
            foreach (ProjectItemGroupTaskMetadataInstance metadataInstance in child.Metadata)
            {
                bool condition = ConditionEvaluator.EvaluateCondition
                                 (
                    metadataInstance.Condition,
                    ParserOptions.AllowAll,
                    bucket.Expander,
                    ExpanderOptions.ExpandAll,
                    Project.Directory,
                    metadataInstance.Location,
                    LoggingContext.LoggingService,
                    LoggingContext.BuildEventContext,
                    FileSystems.Default);

                if (condition)
                {
                    string evaluatedValue = bucket.Expander.ExpandIntoStringLeaveEscaped(metadataInstance.Value, ExpanderOptions.ExpandAll, metadataInstance.Location);

                    // This both stores the metadata so we can add it to all the items we just created later, and
                    // exposes this metadata to further metadata evaluations in subsequent loop iterations.
                    metadataTable.SetValue(metadataInstance.Name, evaluatedValue);
                }
            }

            // Finally, copy the added metadata onto the new items.  The set call is additive.
            ProjectItemInstance.SetMetadata(metadataTable.AddedMetadata, itemsToAdd); // Add in one operation for potential copy-on-write

            // Restore the original metadata table.
            bucket.Expander.Metadata = originalMetadataTable;

            // Determine if we should NOT add duplicate entries
            bool keepDuplicates = ConditionEvaluator.EvaluateCondition
                                  (
                child.KeepDuplicates,
                ParserOptions.AllowAll,
                bucket.Expander,
                ExpanderOptions.ExpandAll,
                Project.Directory,
                child.KeepDuplicatesLocation,
                LoggingContext.LoggingService,
                LoggingContext.BuildEventContext,
                FileSystems.Default);

            if (LogTaskInputs && !LoggingContext.LoggingService.OnlyLogCriticalEvents && itemsToAdd?.Count > 0)
            {
                ItemGroupLoggingHelper.LogTaskParameter(
                    LoggingContext,
                    TaskParameterMessageKind.AddItem,
                    child.ItemType,
                    itemsToAdd,
                    logItemMetadata: true,
                    child.Location);
            }

            // Now add the items we created to the lookup.
            bucket.Lookup.AddNewItemsOfItemType(child.ItemType, itemsToAdd, !keepDuplicates); // Add in one operation for potential copy-on-write
        }
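
        // Illustrative note (not from the source): the keepMetadata/removeMetadata sets and the
        // KeepDuplicates condition consumed above correspond to attributes on the add-style item
        // element inside a <Target>; the item type, include value and metadata names are made up.
        //
        //   <Content Include="@(GeneratedFile)"
        //            KeepMetadata="Link;CopyToOutputDirectory"
        //            KeepDuplicates="false" />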
        public void GetBuckets()
        {
            ProjectInstance project    = ProjectHelpers.CreateEmptyProjectInstance();
            List <string>   parameters = new List <string>();

            parameters.Add("@(File);$(unittests)");
            parameters.Add("$(obj)\\%(Filename).ext");
            parameters.Add("@(File->'%(extension)')");  // attributes in transforms don't affect batching

            ItemDictionary <ProjectItemInstance> itemsByType = new ItemDictionary <ProjectItemInstance>();

            IList <ProjectItemInstance> items = new List <ProjectItemInstance>();

            items.Add(new ProjectItemInstance(project, "File", "a.foo", project.FullPath));
            items.Add(new ProjectItemInstance(project, "File", "b.foo", project.FullPath));
            items.Add(new ProjectItemInstance(project, "File", "c.foo", project.FullPath));
            items.Add(new ProjectItemInstance(project, "File", "d.foo", project.FullPath));
            items.Add(new ProjectItemInstance(project, "File", "e.foo", project.FullPath));
            itemsByType.ImportItems(items);

            items = new List <ProjectItemInstance>();
            items.Add(new ProjectItemInstance(project, "Doc", "a.doc", project.FullPath));
            items.Add(new ProjectItemInstance(project, "Doc", "b.doc", project.FullPath));
            items.Add(new ProjectItemInstance(project, "Doc", "c.doc", project.FullPath));
            items.Add(new ProjectItemInstance(project, "Doc", "d.doc", project.FullPath));
            items.Add(new ProjectItemInstance(project, "Doc", "e.doc", project.FullPath));
            itemsByType.ImportItems(items);

            PropertyDictionary <ProjectPropertyInstance> properties = new PropertyDictionary <ProjectPropertyInstance>();

            properties.Set(ProjectPropertyInstance.Create("UnitTests", "unittests.foo"));
            properties.Set(ProjectPropertyInstance.Create("OBJ", "obj"));

            List <ItemBucket> buckets = BatchingEngine.PrepareBatchingBuckets(parameters, CreateLookup(itemsByType, properties), MockElementLocation.Instance);

            Assert.Equal(5, buckets.Count);

            foreach (ItemBucket bucket in buckets)
            {
                // non-batching data -- same for all buckets
                XmlAttribute tempXmlAttribute = (new XmlDocument()).CreateAttribute("attrib");
                tempXmlAttribute.Value = "'$(Obj)'=='obj'";

                Assert.True(ConditionEvaluator.EvaluateCondition(tempXmlAttribute.Value, ParserOptions.AllowAll, bucket.Expander, ExpanderOptions.ExpandAll, Directory.GetCurrentDirectory(), MockElementLocation.Instance, null, new BuildEventContext(1, 2, 3, 4)));
                Assert.Equal("a.doc;b.doc;c.doc;d.doc;e.doc", bucket.Expander.ExpandIntoStringAndUnescape("@(doc)", ExpanderOptions.ExpandItems, MockElementLocation.Instance));
                Assert.Equal("unittests.foo", bucket.Expander.ExpandIntoStringAndUnescape("$(bogus)$(UNITTESTS)", ExpanderOptions.ExpandPropertiesAndMetadata, MockElementLocation.Instance));
            }

            Assert.Equal("a.foo", buckets[0].Expander.ExpandIntoStringAndUnescape("@(File)", ExpanderOptions.ExpandItems, MockElementLocation.Instance));
            Assert.Equal(".foo", buckets[0].Expander.ExpandIntoStringAndUnescape("@(File->'%(Extension)')", ExpanderOptions.ExpandItems, MockElementLocation.Instance));
            Assert.Equal("obj\\a.ext", buckets[0].Expander.ExpandIntoStringAndUnescape("$(obj)\\%(Filename).ext", ExpanderOptions.ExpandPropertiesAndMetadata, MockElementLocation.Instance));

            // we weren't batching on this attribute, so it has no value
            Assert.Equal(String.Empty, buckets[0].Expander.ExpandIntoStringAndUnescape("%(Extension)", ExpanderOptions.ExpandAll, MockElementLocation.Instance));

            ProjectItemInstanceFactory factory = new ProjectItemInstanceFactory(project, "i");

            items = buckets[0].Expander.ExpandIntoItemsLeaveEscaped("@(file)", factory, ExpanderOptions.ExpandItems, MockElementLocation.Instance);
            Assert.NotNull(items);
            Assert.Equal(1, items.Count);

            int invalidProjectFileExceptions = 0;

            try
            {
                // This should throw because we don't allow item lists to be concatenated
                // with other strings.
                bool throwAway;
                items = buckets[0].Expander.ExpandSingleItemVectorExpressionIntoItems("@(file)$(unitests)", factory, ExpanderOptions.ExpandItems, false /* no nulls */, out throwAway, MockElementLocation.Instance);
            }
            catch (InvalidProjectFileException ex)
            {
                // check we don't lose error codes from IPFE's during build
                Assert.Equal("MSB4012", ex.ErrorCode);
                invalidProjectFileExceptions++;
            }

            // We do allow separators in item vectors; this results in an item group with a single flattened item
            items = buckets[0].Expander.ExpandIntoItemsLeaveEscaped("@(file, ',')", factory, ExpanderOptions.ExpandItems, MockElementLocation.Instance);
            Assert.NotNull(items);
            Assert.Equal(1, items.Count);
            Assert.Equal("a.foo", items[0].EvaluatedInclude);

            Assert.Equal(1, invalidProjectFileExceptions);
        }
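
        // Illustrative sketch (not the real BatchingEngine; assumes "using System.Linq;"): the test
        // above gets five buckets because "%(Filename)" batches the File items on their Filename
        // metadata, one bucket per distinct value, while item types that are not batched on (Doc
        // here) remain visible in full from every bucket -- hence the per-bucket "@(doc)" assertion.
        private static void BucketingSketch()
        {
            string[] files = { "a.foo", "b.foo", "c.foo", "d.foo", "e.foo" };

            var buckets = files
                          .GroupBy(f => System.IO.Path.GetFileNameWithoutExtension(f)) // %(Filename)
                          .ToList();

            // buckets.Count == 5, matching Assert.Equal(5, buckets.Count) above.
        }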
        /// <summary>
        /// Execute a PropertyGroup element, including each child property
        /// </summary>
        /// <param name="lookup">The lookup use for evaluation and as a destination for these properties.</param>
        internal override void ExecuteTask(Lookup lookup)
        {
            foreach (ProjectPropertyGroupTaskPropertyInstance property in _taskInstance.Properties)
            {
                List <ItemBucket> buckets = null;

                try
                {
                    // Find all the metadata references in order to create buckets
                    List <string> parameterValues = new List <string>();
                    GetBatchableValuesFromProperty(parameterValues, property);
                    buckets = BatchingEngine.PrepareBatchingBuckets(parameterValues, lookup, property.Location);

                    // "Execute" each bucket
                    foreach (ItemBucket bucket in buckets)
                    {
                        bool condition = ConditionEvaluator.EvaluateCondition
                                         (
                            property.Condition,
                            ParserOptions.AllowAll,
                            bucket.Expander,
                            ExpanderOptions.ExpandAll,
                            Project.Directory,
                            property.ConditionLocation,
                            LoggingContext.LoggingService,
                            LoggingContext.BuildEventContext,
                            FileSystems.Default);

                        if (condition)
                        {
                            // Check for a reserved name now, so it fails right here instead of later when the property eventually reaches
                            // the outer scope.
                            ProjectErrorUtilities.VerifyThrowInvalidProject
                            (
                                !ReservedPropertyNames.IsReservedProperty(property.Name),
                                property.Location,
                                "CannotModifyReservedProperty",
                                property.Name
                            );

                            string evaluatedValue = bucket.Expander.ExpandIntoStringLeaveEscaped(property.Value, ExpanderOptions.ExpandAll, property.Location);

                            if (LogTaskInputs && !LoggingContext.LoggingService.OnlyLogCriticalEvents)
                            {
                                LoggingContext.LogComment(MessageImportance.Low, "PropertyGroupLogMessage", property.Name, evaluatedValue);
                            }

                            bucket.Lookup.SetProperty(ProjectPropertyInstance.Create(property.Name, evaluatedValue, property.Location, Project.IsImmutable));
                        }
                    }
                }
                finally
                {
                    if (buckets != null)
                    {
                        // Propagate the property changes to the bucket above
                        foreach (ItemBucket bucket in buckets)
                        {
                            bucket.LeaveScope();
                        }
                    }
                }
            }
        }
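
        // Illustrative note (not from the source): the element this method executes is a <PropertyGroup>
        // child of a <Target>; a %(...) metadata reference in a property's value or condition is what
        // yields more than one batching bucket above. The property name and expression are made up.
        //
        //   <PropertyGroup>
        //     <FirstSource Condition="'$(FirstSource)' == ''">%(Compile.Identity)</FirstSource>
        //   </PropertyGroup>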