Exemplo n.º 1
0
        /// <summary>
        /// Translates an open-bond <c>PipBuildRequest</c> into its gRPC wire form,
        /// copying every file hash (with its optional reparse-point target and
        /// associated directories) and every single-pip request.
        /// </summary>
        /// <param name="message">The bond-side request to convert.</param>
        /// <returns>A freshly populated gRPC <c>PipBuildRequest</c>.</returns>
        public static PipBuildRequest ToGrpc(this OpenBond.PipBuildRequest message)
        {
            var result = new PipBuildRequest();

            foreach (var hash in message.Hashes)
            {
                var grpcHash = new FileArtifactKeyedHash()
                {
                    ContentHash = hash.ContentHash.Data.ToByteString(),
                    // Proto string fields cannot hold null, so missing names/paths become empty strings.
                    FileName = hash.FileName ?? string.Empty,
                    Length = hash.Length,
                    PathString = hash.PathString ?? string.Empty,
                    PathValue = hash.PathValue,
                    ReparsePointType = (FileArtifactKeyedHash.Types.GrpcReparsePointType)hash.ReparsePointType,
                    RewriteCount = hash.RewriteCount,
                    IsSourceAffected = hash.IsSourceAffected,
                };

                // Only set when present: assigning null to a proto string field would throw.
                if (hash.ReparsePointTarget != null)
                {
                    grpcHash.ReparsePointTarget = hash.ReparsePointTarget;
                }

                if (hash.AssociatedDirectories != null)
                {
                    foreach (var directory in hash.AssociatedDirectories)
                    {
                        var grpcDirectory = new GrpcDirectoryArtifact()
                        {
                            DirectoryPathValue = directory.DirectoryPathValue,
                            DirectorySealId = directory.DirectorySealId,
                            IsDirectorySharedOpaque = directory.IsDirectorySharedOpaque,
                        };
                        grpcHash.AssociatedDirectories.Add(grpcDirectory);
                    }
                }

                result.Hashes.Add(grpcHash);
            }

            foreach (var pip in message.Pips)
            {
                var grpcPip = new SinglePipBuildRequest()
                {
                    ActivityId = pip.ActivityId,
                    ExpectedPeakWorkingSetMb = pip.ExpectedPeakWorkingSetMb,
                    ExpectedAverageWorkingSetMb = pip.ExpectedAverageWorkingSetMb,
                    ExpectedPeakCommitSizeMb = pip.ExpectedPeakCommitSizeMb,
                    ExpectedAverageCommitSizeMb = pip.ExpectedAverageCommitSizeMb,
                    Fingerprint = pip.Fingerprint.Data.ToByteString(),
                    PipIdValue = pip.PipIdValue,
                    Priority = pip.Priority,
                    SequenceNumber = pip.SequenceNumber,
                    Step = pip.Step
                };

                result.Pips.Add(grpcPip);
            }

            return result;
        }
Exemplo n.º 2
0
        /// <summary>
        /// Collects the keyed content hashes of the files the given pip needs on the remote
        /// worker and appends them to <paramref name="hashes"/>. Only runs for steps that
        /// move content (MaterializeInputs, MaterializeOutputs) or do CacheLookup; for any
        /// other step this is a no-op.
        /// </summary>
        /// <param name="runnable">The pip whose file content is being shipped.</param>
        /// <param name="hashes">
        /// Accumulator for the resulting hashes. Additions are guarded by
        /// <c>m_hashListLock</c>, so the list is presumably shared across concurrent
        /// callers — TODO confirm against the call sites.
        /// </param>
        private void ExtractHashes(RunnablePip runnable, List <FileArtifactKeyedHash> hashes)
        {
            var  step           = runnable.Step;
            // Only these three steps require content hashes on the worker side.
            bool requiresHashes = step == PipExecutionStep.MaterializeInputs ||
                                  step == PipExecutionStep.MaterializeOutputs ||
                                  step == PipExecutionStep.CacheLookup;

            if (!requiresHashes)
            {
                return;
            }

            var  environment          = runnable.Environment;
            // MaterializeOutputs collects the pip's outputs; the other two steps collect inputs
            // (see the isMaterializingInputs argument below).
            bool materializingOutputs = step == PipExecutionStep.MaterializeOutputs;

            // The block below collects process input file artifacts and hashes
            // Currently there is no logic to keep from sending the same hashes twice
            // Consider a model where hashes for files are requested by worker
            using (var pooledFileSet = Pools.GetFileArtifactSet())
                using (var pooledDynamicFileMultiDirectoryMap = Pools.GetFileMultiDirectoryMap())
                {
                    var pathTable    = environment.Context.PathTable;
                    var files        = pooledFileSet.Instance;
                    var dynamicFiles = pooledDynamicFileMultiDirectoryMap.Instance;

                    using (m_masterService.Environment.Counters.StartStopwatch(PipExecutorCounter.RemoteWorker_CollectPipFilesToMaterializeDuration))
                    {
                        environment.State.FileContentManager.CollectPipFilesToMaterialize(
                            isMaterializingInputs: !materializingOutputs,
                            pipTable: environment.PipTable,
                            pip: runnable.Pip,
                            files: files,
                            dynamicFileMap: dynamicFiles,

                            // Only send content which is not already on the worker.
                            // TryAddAvailableHash can return null if the artifact is dynamic file in which case
                            // the file cannot be added to the set due to missing index (note that we're using ContentTrackingSet as the underlying set).
                            // In such a case we decide to include the artifact during the collection.
                            shouldInclude: artifact => TryAddAvailableHash(artifact) ?? true,
                            shouldIncludeServiceFiles: servicePipId => TryAddAvailableHash(servicePipId) ?? true);
                    }

                    using (m_masterService.Environment.Counters.StartStopwatch(PipExecutorCounter.RemoteWorker_CreateFileArtifactKeyedHashDuration))
                    {
                        // Now we have to consider both dynamicFiles map and files set so we union into the files set. If we only rely on files, then the following incorrect build can happen.
                        // Suppose that we have pip P that specifies D as an opaque output directory and D\f as an output file. Pip Q consumes D\f directly (not via directory dependency on D).
                        // Pip R consumes D. Suppose that the cache lookup's for Q and R happen on the same machine. Suppose that Q is processed first, TryAddAvailableHash(D/f)
                        // returns true because it's a declared output and it's added into the files set. Now, on processing R later, particularly in
                        // collecting the files of D to materialize, D\f is not included in the files set because TryAddAvailableHash(D/f) returns false, i.e.,
                        // it's a declared output and it's been added when processing Q. However, D/f is still populated to the dynamicFiles map.
                        files.UnionWith(dynamicFiles.Keys);

                        foreach (var file in files)
                        {
                            var  fileMaterializationInfo = environment.State.FileContentManager.GetInputContent(file);
                            // A file is "dynamic" when it is mapped to at least one associated directory.
                            bool isDynamicFile           = dynamicFiles.TryGetValue(file, out var dynamicDirectories) && dynamicDirectories.Count != 0;

                            var hash = new FileArtifactKeyedHash
                            {
                                RewriteCount = file.RewriteCount,
                                // PathString is only expanded for dynamic files; static files can be
                                // resolved on the worker via PathValue alone since their paths appear
                                // in the shared path table — TODO confirm.
                                PathValue    = file.Path.Value.Value,
                                PathString   = isDynamicFile ? file.Path.ToString(pathTable) : null,
                            }.SetFileMaterializationInfo(pathTable, fileMaterializationInfo);

                            if (isDynamicFile)
                            {
                                // Record every directory this dynamic file belongs to so the worker
                                // can associate it with the right opaque/seal directories.
                                hash.AssociatedDirectories = new List <BondDirectoryArtifact>();

                                foreach (var dynamicDirectory in dynamicDirectories)
                                {
                                    hash.AssociatedDirectories.Add(new BondDirectoryArtifact
                                    {
                                        // Path id of dynamic directory input can be sent to the remote worker because it appears in the pip graph, and thus in path table.
                                        DirectoryPathValue      = dynamicDirectory.Path.RawValue,
                                        DirectorySealId         = dynamicDirectory.PartialSealId,
                                        IsDirectorySharedOpaque = dynamicDirectory.IsSharedOpaque
                                    });
                                }
                            }

                            // The list is shared; only the Add needs the lock, so hold it per item
                            // rather than around the whole loop.
                            lock (m_hashListLock)
                            {
                                hashes.Add(hash);
                            }
                        }
                    }
                }
        }