Example #1
        public void MultiInputItemsThatCorrelatesWithMultipleTransformOutputItems()
        {
            Console.WriteLine("MultiInputItemsThatCorrelatesWithMultipleTransformOutputItems");
            ProjectInstance project = ProjectHelpers.CreateEmptyProjectInstance();
            string          inputs  = "@(Items);@(MoreItems)";
            string          outputs = "@(Items->'%(Filename).dll');@(MoreItems->'%(Filename).xml')";

            FileWriteInfo[] filesToAnalyze = new FileWriteInfo[]
            {
                new FileWriteInfo("a.cs", _yesterday),
                new FileWriteInfo("a.txt", _yesterday),
                new FileWriteInfo("a.dll", _today),
                new FileWriteInfo("a.xml", _today),
                new FileWriteInfo("b.cs", _yesterday),
                new FileWriteInfo("b.txt", _yesterday),
                new FileWriteInfo("b.dll", _twoDaysAgo),
                new FileWriteInfo("b.xml", _today),
                new FileWriteInfo("c.cs", _yesterday),
                new FileWriteInfo("c.txt", _yesterday),
                new FileWriteInfo("c.dll", _today),
                new FileWriteInfo("c.xml", _today)
            };

            List <ProjectItemInstance> items = new List <ProjectItemInstance>();

            items.Add(new ProjectItemInstance(project, "Items", "a.cs", project.FullPath));
            items.Add(new ProjectItemInstance(project, "Items", "b.cs", project.FullPath));
            items.Add(new ProjectItemInstance(project, "Items", "c.cs", project.FullPath));
            items.Add(new ProjectItemInstance(project, "MoreItems", "a.txt", project.FullPath));
            items.Add(new ProjectItemInstance(project, "MoreItems", "b.txt", project.FullPath));
            items.Add(new ProjectItemInstance(project, "MoreItems", "c.txt", project.FullPath));

            ItemDictionary <ProjectItemInstance> itemsByName = new ItemDictionary <ProjectItemInstance>();

            itemsByName.ImportItems(items);

            ItemDictionary <ProjectItemInstance> changedTargetInputs  = new ItemDictionary <ProjectItemInstance>();
            ItemDictionary <ProjectItemInstance> upToDateTargetInputs = new ItemDictionary <ProjectItemInstance>();
            DependencyAnalysisResult             result = PerformDependencyAnalysisTestHelper(filesToAnalyze, itemsByName, inputs, outputs, out changedTargetInputs, out upToDateTargetInputs);

            foreach (ProjectItemInstance itemInstance in changedTargetInputs)
            {
                Console.WriteLine("Changed: {0}:{1}", itemInstance.ItemType, itemInstance.EvaluatedInclude);
            }

            Assert.Equal(DependencyAnalysisResult.IncrementalBuild, result); // "Should only build partially."

            // Even though all of the MoreItems inputs were up to date, we still expect to see an empty
            // marker for that item type so that lookups can correctly *not* find items of that type.
            Assert.True(changedTargetInputs.HasEmptyMarker("MoreItems"));
        }
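
With the timestamps above only the "b" group of inputs is stale (b.dll is older than b.cs), while every generated .xml file is newer than its .txt input. A hedged follow-up sketch of the assertions that expectation implies (not part of the original test; it assumes the ItemDictionary string indexer used elsewhere in these examples):

            ICollection<ProjectItemInstance> changedItems = changedTargetInputs["Items"];
            Assert.Equal(1, changedItems.Count);
            foreach (ProjectItemInstance changed in changedItems)
            {
                Assert.Equal("b.cs", changed.EvaluatedInclude);   // b.dll was the only stale output
            }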
Example #2
        /// <summary>
        /// Gets the task execution mode based on the result of the up-to-date check.
        /// </summary>
        /// <param name="analysis">The result of the up-to-date check.</param>
        /// <returns>The mode to be used to execute tasks.</returns>
        private TaskExecutionMode GetTaskExecutionMode(DependencyAnalysisResult analysis)
        {
            TaskExecutionMode executionMode;

            if ((analysis == DependencyAnalysisResult.SkipUpToDate) ||
                (analysis == DependencyAnalysisResult.IncrementalBuild))
            {
                executionMode = TaskExecutionMode.InferOutputsOnly;
            }
            else
            {
                executionMode = TaskExecutionMode.ExecuteTaskAndGatherOutputs;
            }

            // Execute the task using the items that need to be (re)built
            if ((analysis == DependencyAnalysisResult.FullBuild) ||
                (analysis == DependencyAnalysisResult.IncrementalBuild))
            {
                executionMode = executionMode | TaskExecutionMode.ExecuteTaskAndGatherOutputs;
            }

            return executionMode;
        }
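
        // Note (added for clarity; not in the original source): the branches above yield this mapping:
        //   SkipUpToDate                 -> InferOutputsOnly
        //   IncrementalBuild             -> InferOutputsOnly | ExecuteTaskAndGatherOutputs
        //   FullBuild                    -> ExecuteTaskAndGatherOutputs
        //   SkipNoInputs / SkipNoOutputs -> ExecuteTaskAndGatherOutputs, though ExecuteTarget
        //                                  (Example #4) never requests a mode for those results.
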
        /// <summary>
        /// Does appropriate logging to indicate why this target is being built fully or partially.
        /// </summary>
        /// <param name="result"></param>
        private void LogReasonForBuildingTarget(DependencyAnalysisResult result)
        {
            // Only if we are not logging just critical events should we be logging the details
            if (!loggingService.OnlyLogCriticalEvents)
            {
                if (result == DependencyAnalysisResult.FullBuild && this.dependencyAnalysisDetail.Count > 0)
                {
                    // For the full build decision there are three possible outcomes
                    loggingService.LogComment(buildEventContext, "BuildTargetCompletely", this.targetToAnalyze.Name);

                    foreach (DependencyAnalysisLogDetail logDetail in this.dependencyAnalysisDetail)
                    {
                        string reason = GetFullBuildReason(logDetail);
                        loggingService.LogCommentFromText(buildEventContext, MessageImportance.Low, reason);
                    }
                }
                else if (result == DependencyAnalysisResult.IncrementalBuild)
                {
                    // For the partial build decision there are three possible outcomes
                    loggingService.LogComment(buildEventContext, MessageImportance.Normal, "BuildTargetPartially", this.targetToAnalyze.Name);
                    foreach (DependencyAnalysisLogDetail logDetail in this.dependencyAnalysisDetail)
                    {
                        string reason = GetIncrementalBuildReason(logDetail);
                        loggingService.LogCommentFromText(buildEventContext, MessageImportance.Low, reason);
                    }
                }
            }
        }
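
A small usage sketch (hypothetical; it is not part of the original source and would have to live inside the same class, since GetTaskExecutionMode is private). It assumes TaskExecutionMode is a [Flags]-style enum, which the bitwise OR in GetTaskExecutionMode implies:

        TaskExecutionMode mode = GetTaskExecutionMode(DependencyAnalysisResult.IncrementalBuild);

        // For an incremental build both flags are expected to be set: up-to-date inputs only have
        // their outputs inferred, while changed inputs actually run through the tasks.
        bool willInfer   = (mode & TaskExecutionMode.InferOutputsOnly) != 0;
        bool willExecute = (mode & TaskExecutionMode.ExecuteTaskAndGatherOutputs) != 0;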
Example #4
        /// <summary>
        /// Runs all of the tasks for this target, batched as necessary.
        /// </summary>
        internal async Task ExecuteTarget(ITaskBuilder taskBuilder, BuildRequestEntry requestEntry, ProjectLoggingContext projectLoggingContext, CancellationToken cancellationToken)
        {
#if MSBUILDENABLEVSPROFILING
            try
            {
                string beginTargetBuild = String.Format(CultureInfo.CurrentCulture, "Build Target {0} in Project {1} - Start", this.Name, projectFullPath);
                DataCollection.CommentMarkProfile(8800, beginTargetBuild);
#endif

            try
            {
                VerifyState(_state, TargetEntryState.Execution);
                ErrorUtilities.VerifyThrow(!_isExecuting, "Target {0} is already executing", _target.Name);
                _cancellationToken = cancellationToken;
                _isExecuting       = true;

                // Generate the batching buckets.  Note that each bucket will get a lookup based on the baseLookup.  This lookup will be in its
                // own scope, which we will collapse back down into the baseLookup at the bottom of the function.
                List <ItemBucket> buckets = BatchingEngine.PrepareBatchingBuckets(GetBatchableParametersForTarget(), _baseLookup, _target.Location);

                WorkUnitResult       aggregateResult      = new WorkUnitResult();
                TargetLoggingContext targetLoggingContext = null;
                bool   targetSuccess   = false;
                int    numberOfBuckets = buckets.Count;
                string projectFullPath = requestEntry.RequestConfiguration.ProjectFullPath;

                string parentTargetName = null;
                if (ParentEntry != null && ParentEntry.Target != null)
                {
                    parentTargetName = ParentEntry.Target.Name;
                }

                for (int i = 0; i < numberOfBuckets; i++)
                {
                    ItemBucket bucket = buckets[i];

                    // If one of the buckets failed, stop building.
                    if (aggregateResult.ActionCode == WorkUnitActionCode.Stop)
                    {
                        break;
                    }

                    targetLoggingContext = projectLoggingContext.LogTargetBatchStarted(projectFullPath, _target, parentTargetName);
                    WorkUnitResult bucketResult = null;
                    targetSuccess = false;

                    Lookup.Scope entryForInference = null;
                    Lookup.Scope entryForExecution = null;

                    try
                    {
                        // This isn't really dependency analysis.  This is up-to-date checking.  Based on this we will be able to determine if we should
                        // run tasks in inference or execution mode (or both) or just skip them altogether.
                        ItemDictionary <ProjectItemInstance> changedTargetInputs;
                        ItemDictionary <ProjectItemInstance> upToDateTargetInputs;
                        Lookup lookupForInference;
                        Lookup lookupForExecution;

                        // UNDONE: (Refactor) Refactor TargetUpToDateChecker to take a logging context, not a logging service.
                        TargetUpToDateChecker    dependencyAnalyzer = new TargetUpToDateChecker(requestEntry.RequestConfiguration.Project, _target, targetLoggingContext.LoggingService, targetLoggingContext.BuildEventContext);
                        DependencyAnalysisResult dependencyResult   = dependencyAnalyzer.PerformDependencyAnalysis(bucket, out changedTargetInputs, out upToDateTargetInputs);

                        switch (dependencyResult)
                        {
                        // UNDONE: Need to enter/leave debugger scope properly for the <Target> element.
                        case DependencyAnalysisResult.FullBuild:
                        case DependencyAnalysisResult.IncrementalBuild:
                        case DependencyAnalysisResult.SkipUpToDate:
                            // Create the lookups used to hold the current set of properties and items
                            lookupForInference = bucket.Lookup;
                            lookupForExecution = bucket.Lookup.Clone();

                            // Push the lookup stack up one so that we are only modifying items and properties in that scope.
                            entryForInference = lookupForInference.EnterScope("ExecuteTarget() Inference");
                            entryForExecution = lookupForExecution.EnterScope("ExecuteTarget() Execution");

                            // if we're doing an incremental build, we need to effectively run the task twice -- once
                            // to infer the outputs for up-to-date input items, and once to actually execute the task;
                            // as a result we need separate sets of item and property collections to track changes
                            if (dependencyResult == DependencyAnalysisResult.IncrementalBuild)
                            {
                                // subset the relevant items to those that are up-to-date
                                foreach (string itemType in upToDateTargetInputs.ItemTypes)
                                {
                                    lookupForInference.PopulateWithItems(itemType, upToDateTargetInputs[itemType]);
                                }

                                // subset the relevant items to those that have changed
                                foreach (string itemType in changedTargetInputs.ItemTypes)
                                {
                                    lookupForExecution.PopulateWithItems(itemType, changedTargetInputs[itemType]);
                                }
                            }

                            // We either have some work to do or at least we need to infer outputs from inputs.
                            bucketResult = await ProcessBucket(taskBuilder, targetLoggingContext, GetTaskExecutionMode(dependencyResult), lookupForInference, lookupForExecution);

                            // Now aggregate the result with the existing known results.  There are four rules, assuming the target was not
                            // skipped due to being up-to-date:
                            // 1. If this bucket failed or was cancelled, the aggregate result is failure.
                            // 2. If this bucket Succeeded and we have not previously failed, the aggregate result is a success.
                            // 3. Otherwise, the bucket was skipped, which has no effect on the aggregate result.
                            // 4. If the bucket's action code says to stop, then we stop, regardless of the success or failure state.
                            if (dependencyResult != DependencyAnalysisResult.SkipUpToDate)
                            {
                                aggregateResult = aggregateResult.AggregateResult(bucketResult);
                            }
                            else
                            {
                                if (aggregateResult.ResultCode == WorkUnitResultCode.Skipped)
                                {
                                    aggregateResult = aggregateResult.AggregateResult(new WorkUnitResult(WorkUnitResultCode.Success, WorkUnitActionCode.Continue, null));
                                }
                            }

                            // Pop the lookup scopes, causing them to collapse their values back down into the
                            // bucket's lookup.
                            // NOTE: this order is important because when we infer outputs, we are trying
                            // to produce the same results as would be produced from a full build; as such
                            // if we're doing both the infer and execute steps, we want the outputs from
                            // the execute step to override the outputs of the infer step -- this models
                            // the full build scenario more correctly than if the steps were reversed
                            entryForInference.LeaveScope();
                            entryForInference = null;
                            entryForExecution.LeaveScope();
                            entryForExecution = null;
                            targetSuccess     = (bucketResult != null) && (bucketResult.ResultCode == WorkUnitResultCode.Success);
                            break;

                        case DependencyAnalysisResult.SkipNoInputs:
                        case DependencyAnalysisResult.SkipNoOutputs:
                            // We have either no inputs or no outputs, so there is nothing to do.
                            targetSuccess = true;
                            break;
                        }
                    }
                    catch (InvalidProjectFileException e)
                    {
                        // Make sure the Invalid Project error gets logged *before* TargetFinished.  Otherwise,
                        // the log is confusing.
                        targetLoggingContext.LogInvalidProjectFileError(e);

                        if (null != entryForInference)
                        {
                            entryForInference.LeaveScope();
                        }

                        if (null != entryForExecution)
                        {
                            entryForExecution.LeaveScope();
                        }

                        aggregateResult = aggregateResult.AggregateResult(new WorkUnitResult(WorkUnitResultCode.Failed, WorkUnitActionCode.Stop, null));
                    }
                    finally
                    {
                        // Don't log the last target finished event until we can process the target outputs as we want to attach them to the
                        // last target batch.
                        if (targetLoggingContext != null && i < numberOfBuckets - 1)
                        {
                            targetLoggingContext.LogTargetBatchFinished(projectFullPath, targetSuccess, null);
                            targetLoggingContext = null;
                        }
                    }
                }

                // Produce the final results.
                List <TaskItem> targetOutputItems = new List <TaskItem>();

                try
                {
                    // If any legacy CallTarget operations took place, integrate them back in to the main lookup now.
                    LeaveLegacyCallTargetScopes();

                    // Publish the items for each bucket back into the baseLookup.  Note that EnterScope() was actually called on each
                    // bucket inside of the ItemBucket constructor, which is why you don't see it anywhere around here.
                    foreach (ItemBucket bucket in buckets)
                    {
                        bucket.LeaveScope();
                    }

                    string          targetReturns         = _target.Returns;
                    ElementLocation targetReturnsLocation = _target.ReturnsLocation;

                    // If there are no targets in the project file that use the "Returns" attribute, that means that we
                    // revert to the legacy behavior in the case where Returns is not specified (null, rather
                    // than the empty string, which indicates no returns).  Legacy behavior is for all
                    // of the target's Outputs to be returned.
                    // On the other hand, if there is at least one target in the file that uses the Returns attribute,
                    // then all targets in the file are run according to the new behavior (return nothing unless otherwise
                    // specified by the Returns attribute).
                    if (targetReturns == null)
                    {
                        if (!_target.ParentProjectSupportsReturnsAttribute)
                        {
                            targetReturns         = _target.Outputs;
                            targetReturnsLocation = _target.OutputsLocation;
                        }
                    }

                    if (!String.IsNullOrEmpty(targetReturns))
                    {
                        // Determine if we should keep duplicates.
                        bool keepDupes = ConditionEvaluator.EvaluateCondition
                                         (
                            _target.KeepDuplicateOutputs,
                            ParserOptions.AllowPropertiesAndItemLists,
                            _expander,
                            ExpanderOptions.ExpandPropertiesAndItems,
                            requestEntry.ProjectRootDirectory,
                            _target.KeepDuplicateOutputsLocation,
                            projectLoggingContext.LoggingService,
                            projectLoggingContext.BuildEventContext
                                         );

                        // NOTE: we need to gather the outputs in batches, because the output specification may reference item metadata
                        // Also, we are using the baseLookup, which has possibly had changes made to it since the project started.  Because of this, the
                        // set of outputs calculated here may differ from those which would have been calculated at the beginning of the target.  It is
                        // assumed the user intended this.
                        List <ItemBucket> batchingBuckets = BatchingEngine.PrepareBatchingBuckets(GetBatchableParametersForTarget(), _baseLookup, _target.Location);

                        if (keepDupes)
                        {
                            foreach (ItemBucket bucket in batchingBuckets)
                            {
                                targetOutputItems.AddRange(bucket.Expander.ExpandIntoTaskItemsLeaveEscaped(targetReturns, ExpanderOptions.ExpandAll, targetReturnsLocation));
                            }
                        }
                        else
                        {
                            HashSet <TaskItem> addedItems = new HashSet <TaskItem>();
                            foreach (ItemBucket bucket in batchingBuckets)
                            {
                                IList <TaskItem> itemsToAdd = bucket.Expander.ExpandIntoTaskItemsLeaveEscaped(targetReturns, ExpanderOptions.ExpandAll, targetReturnsLocation);

                                foreach (TaskItem item in itemsToAdd)
                                {
                                    if (!addedItems.Contains(item))
                                    {
                                        targetOutputItems.Add(item);
                                        addedItems.Add(item);
                                    }
                                }
                            }
                        }
                    }
                }
                finally
                {
                    if (targetLoggingContext != null)
                    {
                        // log the last target finished since we now have the target outputs.
                        targetLoggingContext.LogTargetBatchFinished(projectFullPath, targetSuccess, targetOutputItems != null && targetOutputItems.Count > 0 ? targetOutputItems : null);
                    }
                }

                _targetResult = new TargetResult(targetOutputItems.ToArray(), aggregateResult);

                if (aggregateResult.ResultCode == WorkUnitResultCode.Failed && aggregateResult.ActionCode == WorkUnitActionCode.Stop)
                {
                    _state = TargetEntryState.ErrorExecution;
                }
                else
                {
                    _state = TargetEntryState.Completed;
                }
            }
            finally
            {
                _isExecuting = false;
            }
#if MSBUILDENABLEVSPROFILING
            }
            finally
            {
                string endTargetBuild = String.Format(CultureInfo.CurrentCulture, "Build Target {0} in Project {1} - End", this.Name, projectFullPath);
                DataCollection.CommentMarkProfile(8801, endTargetBuild);
            }
#endif
        }
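
The four aggregation rules in the comments above can be restated as a small helper. This is a hypothetical sketch, not the library's WorkUnitResult.AggregateResult; it assumes the WorkUnitResultCode values Success, Skipped, Failed and Canceled, and it leaves rule 4 to the caller (the bucket loop above already breaks when the action code says to stop):

        private static WorkUnitResultCode AggregateResultCodeSketch(WorkUnitResultCode aggregate, WorkUnitResultCode bucket)
        {
            // Rule 1: a failed or cancelled bucket (WorkUnitResultCode.Canceled is assumed here) makes the aggregate a failure.
            if (bucket == WorkUnitResultCode.Failed || bucket == WorkUnitResultCode.Canceled)
            {
                return WorkUnitResultCode.Failed;
            }

            // Rule 2: a successful bucket yields success, unless something has already failed.
            if (bucket == WorkUnitResultCode.Success && aggregate != WorkUnitResultCode.Failed)
            {
                return WorkUnitResultCode.Success;
            }

            // Rule 3: a skipped bucket leaves the aggregate unchanged.
            return aggregate;
        }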
Example #5
        private void ProcessTaskOutputs(TaskExecutionContext executionContext)
        {
            // Get the success or failure
            if (targetBuildSuccessful)
            {
                if (!executionContext.TaskExecutedSuccessfully)
                {
                    targetBuildSuccessful = false;
                    // Check if the task threw an unhandled exception during its execution
                    if (executionContext.ThrownException != null)
                    {
                        // The stack trace for remote task InvalidProjectFileException can be ignored
                        // since it is not recorded and the exception will be caught inside the project
                        // class
                        if (executionContext.ThrownException is InvalidProjectFileException)
                        {
                            throw executionContext.ThrownException;
                        }
                        else
                        {
                            // The error occurred outside of the user code (though it may still be caused
                            // by bad user input), so the build should be terminated. The exception
                            // will be logged as a fatal build error in the engine. Exceptions caused
                            // by user code are converted into LogFatalTaskError messages by the TaskEngine.
                            RemoteErrorException.Throw(executionContext.ThrownException, 
                                                       targetBuildEventContext, 
                                                       "RemoteErrorDuringTaskExecution",
                                                       parentProject.FullFileName, 
                                                       targetClass.Name);
                        }
                    }
                    // We need to disable the execution of the task if it was previously enabled,
                    // and if we were only doing execution we can stop processing at the point the
                    // error occurred. If the task fails (which implies that ContinueOnError != 'true'), then do
                    // not execute the remaining tasks because they may depend on the completion
                    // of this task.
                    ErrorUtilities.VerifyThrow(howToBuild == DependencyAnalysisResult.FullBuild ||
                                                howToBuild == DependencyAnalysisResult.IncrementalBuild,
                                                "We can only see a failure for an execution stage");
                    if (howToBuild != DependencyAnalysisResult.FullBuild)
                        howToBuild = DependencyAnalysisResult.SkipUpToDate;
                    else
                        exitBatchDueToError = true;
                }
            }

            currentTask++;
        }
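
For reference, the effect of a task failure above depends on how the batch was being built; the summary below is an added note, not original source, tying the decision back to GetTaskExecutionMode in Example #2:

        // howToBuild at the failure | effect on the rest of the batch
        // --------------------------+--------------------------------------------------------------
        // IncrementalBuild          | downgraded to SkipUpToDate, which disables execution of the
        //                           | remaining tasks (compare InferOutputsOnly in Example #2)
        // FullBuild                 | exitBatchDueToError = true, so processing of the batch stops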
Example #6
        private void InitializeForRunningSingleTargetBatch()
        {
            // Verify that the target is in the right state
            ErrorUtilities.VerifyThrow(inProgressBuildState == InProgressBuildState.RunningTasks, "Wrong state");
            // Check that the current bucket index is valid
            ErrorUtilities.VerifyThrow(currentBucket < buckets.Count, "No buckets left");

            Hashtable changedTargetInputs = null;
            Hashtable upToDateTargetInputs = null;
            howToBuild = DependencyAnalysisResult.FullBuild;
            ItemBucket bucket = (ItemBucket)buckets[currentBucket];

            // For the first batch of a target, use the target's original target ID. For each batch after the first, use a unique ID to identify the target within the batch.
            if (currentBucket != 0)
            {
                targetBuildEventContext = new BuildEventContext(targetBuildEventContext.NodeId, parentEngine.GetNextTargetId(), targetBuildEventContext.ProjectContextId, targetBuildEventContext.TaskId);
            }

            // Flag the start of the target.
            parentEngine.LoggingServices.LogTargetStarted(
                targetBuildEventContext,
                targetClass.Name,
                this.parentProject.FullFileName,
                targetClass.ProjectFileOfTargetElement);
            loggedTargetStart = true;

            // Figure out how we should build the target
            TargetDependencyAnalyzer dependencyAnalyzer = new TargetDependencyAnalyzer(parentProject.ProjectDirectory, targetClass, parentEngine.LoggingServices, targetBuildEventContext);
            howToBuild = dependencyAnalyzer.PerformDependencyAnalysis(bucket, out changedTargetInputs, out upToDateTargetInputs);

            targetBuildSuccessful = true;
            exitBatchDueToError = false;

            // If we need to build the target, initialize the data structures for
            // running the tasks.
            if ((howToBuild != DependencyAnalysisResult.SkipNoInputs) &&
                (howToBuild != DependencyAnalysisResult.SkipNoOutputs))
            {
                // Within each target batch, items are divided between inference and execution; they must be
                // kept separate, which we enforce by cloning the bucket's lookup and entering a new scope on each.
                lookupForInference = bucket.Lookup;
                lookupForExecution = bucket.Lookup.Clone();

                lookupForInference.EnterScope();
                lookupForExecution.EnterScope();

                // if we're doing an incremental build, we need to effectively run the task twice -- once
                // to infer the outputs for up-to-date input items, and once to actually execute the task;
                // as a result we need separate sets of item and property collections to track changes
                if (howToBuild == DependencyAnalysisResult.IncrementalBuild)    
                {
                    // subset the relevant items to those that are up-to-date
                    foreach (DictionaryEntry upToDateTargetInputsEntry in upToDateTargetInputs)
                    {
                        lookupForInference.PopulateWithItems((string)upToDateTargetInputsEntry.Key, (BuildItemGroup)upToDateTargetInputsEntry.Value);
                    }

                    // subset the relevant items to those that have changed
                    foreach (DictionaryEntry changedTargetInputsEntry in changedTargetInputs)
                    {
                        lookupForExecution.PopulateWithItems((string)changedTargetInputsEntry.Key, (BuildItemGroup)changedTargetInputsEntry.Value);
                    }
                }

                projectFileOfTaskNode = XmlUtilities.GetXmlNodeFile(targetElement, parentProject.FullFileName);

                // Initialize the per-bucket task counters.
                currentTask = 0;
                skippedNodeCount = 0;
            }
            else
            {
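                // Nothing to run for this bucket: position currentTask past the last child node
                // so that no tasks are processed.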
                currentTask = targetElement.ChildNodes.Count;
            }
        }