/// <summary>
/// Runs the analysis: populates pips/files in the graph, releases intermediate
/// state, and writes the dependency graph to the output file.
/// </summary>
/// <returns>Always 0 (success).</returns>
public override int Analyze()
{
    // Observed inputs collection is finished by now, so compact the heap
    // before the memory-heavy pip loop below.
    GC.Collect();

    PopulatePipsAndFilesInGraph();

    // Drop intermediate state and collect again so the writer has headroom.
    m_nodesWithObservedInputs = null;
    m_fingerprintComputations = null;
    GC.Collect();

    var writer = new DependencyAnalyzerOutputWriter(
        m_outputFilePath,
        CachedGraph,
        OutputGraphVersion,
        m_allFiles,
        m_includeDirs ? m_allDirs : null,
        m_allPips,
        m_pathMappings);
    writer.Write();

    return 0;
}
/// <summary>
/// Constructs a new binary log reader for the given stream.
/// </summary>
/// <param name="logStream">Stream positioned at the start of a binary log.</param>
/// <param name="context">Execution context used while interpreting events.</param>
/// <param name="closeStreamOnDispose">Whether disposing the reader closes the stream.</param>
public BinaryLogReader(Stream logStream, PipExecutionContext context, bool closeStreamOnDispose = true)
{
    Contract.Requires(logStream != null);
    Contract.Requires(context != null);
    LogStream = logStream;
    m_context = context;
    m_closeStreamOnDispose = closeStreamOnDispose;
    m_capturedPaths = new ConcurrentDenseIndex<AbsolutePath>(debug: false);
    m_capturedStrings = new ConcurrentDenseIndex<StringId>(debug: false);
    m_capturedStrings[0] = StringId.Invalid;
    m_logStreamReader = new EventReader(this);
    m_handlers = new EventHandler[1024];

    // BUGFIX: Stream.Read may return fewer bytes than requested even before
    // end-of-stream (e.g. network/pipe streams). The previous single-call read
    // could spuriously leave LogId null, so loop until the id is fully read or
    // EOF is reached.
    var logIdBytes = new byte[BinaryLogger.LogIdByteLength];
    int totalRead = 0;
    while (totalRead < BinaryLogger.LogIdByteLength)
    {
        int read = logStream.Read(logIdBytes, totalRead, BinaryLogger.LogIdByteLength - totalRead);
        if (read == 0)
        {
            // End of stream reached before the full log id was available.
            break;
        }

        totalRead += read;
    }

    LogId = totalRead == BinaryLogger.LogIdByteLength ? new Guid(logIdBytes) : (Guid?)null;
}
/// <summary>
/// Class constructor.
/// </summary>
protected DirectedGraph()
{
    // Dense per-node indices backing the edge lists and node heights;
    // debug checks disabled.
    InEdges = new ConcurrentDenseIndex<NodeEdgeListHeader>(false);
    OutEdges = new ConcurrentDenseIndex<NodeEdgeListHeader>(false);
    NodeHeights = new ConcurrentDenseIndex<int>(false);

    // Start with an empty graph.
    m_lastNodeId = 0;
    m_edgeCount = 0;
}
/// <summary>
/// Creates a fresh, writable store.
/// </summary>
/// <param name="pathTable">Path table used by stored items.</param>
/// <param name="symbolTable">Symbol table used by stored items.</param>
/// <param name="initialBufferSize">Initial size of write buffers; must be non-negative.</param>
/// <param name="debug">Enables debug checks in the underlying index.</param>
protected PageableStore(PathTable pathTable, SymbolTable symbolTable, int initialBufferSize, bool debug)
{
    Contract.RequiresNotNull(pathTable);
    Contract.RequiresNotNull(symbolTable);
    Contract.Requires(initialBufferSize >= 0);

    PathTable = pathTable;
    SymbolTable = symbolTable;
    Debug = debug;

    m_initialBufferSize = initialBufferSize;
    m_itemLocations = new ConcurrentDenseIndex<ItemLocation>(debug);

    // A newly-created (non-deserialized) store accepts writes.
    CanWrite = true;
}
/// <summary>
/// Base constructor that every other constructor should call.
/// </summary>
private PipTable(PathTable pathTable, PageablePipStore store, ConcurrentDenseIndex<MutablePipState> mutables, int maxDegreeOfParallelism, bool debug)
{
    Contract.Requires(store != null);
    Contract.Requires(mutables != null);

    m_store = store;
    m_mutables = mutables;
    m_serializationScheduler = new PipTableSerializationScheduler(maxDegreeOfParallelism, debug: debug, serializer: ProcessQueueItem);

    // Placeholder pip used wherever a HashSourceFile pip is needed.
    var dummyPath = AbsolutePath.Create(pathTable, PathGeneratorUtilities.GetAbsolutePath("B", "DUMMY_HASH_SOURCE_FILE"));
    m_dummyHashSourceFilePip = new HashSourceFile(FileArtifact.CreateSourceFile(dummyPath));
}
/// <summary>
/// Creates a new pip table.
/// </summary>
/// <param name="pathTable">Path table for pip paths.</param>
/// <param name="symbolTable">Symbol table for pip symbols.</param>
/// <param name="initialBufferSize">Initial store buffer size; must be non-negative.</param>
/// <param name="maxDegreeOfParallelism">Serialization parallelism; -1 means unbounded, 0 only allowed in debug.</param>
/// <param name="debug">Enables debug checks.</param>
public PipTable(PathTable pathTable, SymbolTable symbolTable, int initialBufferSize, int maxDegreeOfParallelism, bool debug)
{
    Contract.Requires(pathTable != null);
    Contract.Requires(symbolTable != null);
    Contract.Requires(initialBufferSize >= 0);
    Contract.Requires(maxDegreeOfParallelism >= -1);
    Contract.Requires(maxDegreeOfParallelism > 0 || debug);

    m_mutables = new ConcurrentDenseIndex<MutablePipState>(debug);
    m_store = new PageablePipStore(pathTable, symbolTable, initialBufferSize, debug);
    m_serializationScheduler = new PipTableSerializationScheduler(maxDegreeOfParallelism, debug, ProcessQueueItem);
}
/// <summary>
/// Constructor used by deserialization.
/// </summary>
private PipTable(PageablePipStore store, ConcurrentDenseIndex<MutablePipState> mutables, int pipCount, int maxDegreeOfParallelism)
{
    Contract.Requires(store != null);
    Contract.Requires(mutables != null);

    m_store = store;
    m_mutables = mutables;

    // All pips already exist; id and count both start at the deserialized total.
    m_lastId = pipCount;
    m_count = pipCount;

    m_serializationScheduler = new PipTableSerializationScheduler(maxDegreeOfParallelism, debug: false, serializer: ProcessQueueItem);

    // A deserialized table is read-only: no further pips may be scheduled.
    m_serializationScheduler.Complete();
}
/// <summary>
/// Computes how many pips actually ran concurrently (max) during the build,
/// along with per-pip-type counts, span list, and sorted longest/shortest pip lists.
/// </summary>
public void ComputeActualConcurrency()
{
    List<PipSpan> spans = new List<PipSpan>();

    LongestRunningPips = GetSortedPips(20, true, n => true, n => Durations[n]);
    ShortestRunningProcesses = GetSortedPips(20, false, n => PipTypes[n] == PipType.Process, n => Durations[n]);

    foreach (var node in DataflowGraph.Nodes)
    {
        Interlocked.Increment(ref PipTypeCounts[(int)PipTypes[node]]);
        var startTime = StartTimes[node];
        if (PipTypes[node] == PipType.Process)
        {
            spans.Add(new PipSpan()
            {
                Id = node,
                StartTime = startTime - MinStartTime,
                Duration = Durations[node],
            });
        }
    }

    // Sort by start time (then duration) so the overlap scan below can stop
    // at the first span starting after the current one ends.
    spans.Sort(new ComparerBuilder<PipSpan>().CompareByAfter(s => s.StartTime).CompareByAfter(s => s.Duration));
    Spans = spans;

    ConcurrentDenseIndex<int> concurrencyIndex = new ConcurrentDenseIndex<int>(false);
    ConcurrentDenseIndex<int> concurrencyCount = new ConcurrentDenseIndex<int>(false);
    for (int i = 0; i < spans.Count; i++)
    {
        PipSpan s = spans[i];

        // BUGFIX: endTime was computed but never used; the inner loop re-read
        // s.EndTime on every iteration.
        ulong endTime = s.EndTime;
        for (int j = i; j < spans.Count; j++)
        {
            PipSpan t = spans[j];
            if (t.StartTime <= endTime)
            {
                int value = concurrencyIndex[(uint)j] + 1;
                concurrencyIndex[(uint)j] = value;
                Max(ref ActualConcurrency, value);
            }
            else
            {
                break;
            }
        }

        var c = concurrencyIndex[(uint)i];
        concurrencyCount[(uint)c] = concurrencyCount[(uint)c] + 1;
    }
}
/// <summary>
/// Constructor used when deserializing a PageableStore.
/// </summary>
protected PageableStore(PathTable pathTable, SymbolTable symbolTable, SerializedState state, int initialBufferSize)
{
    Contract.RequiresNotNull(pathTable);
    Contract.RequiresNotNull(symbolTable);

    PathTable = pathTable;
    SymbolTable = symbolTable;

    // Restore persisted state; note CanWrite stays false — no more writes
    // are expected after deserialization.
    Debug = state.Debug;
    m_lastId = state.LastId;
    m_itemLocations = state.ItemLocations;
    m_pageStreams = state.PageStreams;
    m_initialBufferSize = initialBufferSize;
}
/// <summary>
/// Creates a new pip table.
/// </summary>
/// <param name="pathTable">Path table for pip paths.</param>
/// <param name="symbolTable">Symbol table for pip symbols.</param>
/// <param name="initialBufferSize">Initial store buffer size; must be non-negative.</param>
/// <param name="maxDegreeOfParallelism">Serialization parallelism; -1 means unbounded, 0 only allowed in debug.</param>
/// <param name="debug">Enables debug checks.</param>
public PipTable(PathTable pathTable, SymbolTable symbolTable, int initialBufferSize, int maxDegreeOfParallelism, bool debug)
{
    Contract.Requires(pathTable != null);
    Contract.Requires(symbolTable != null);
    Contract.Requires(initialBufferSize >= 0);
    Contract.Requires(maxDegreeOfParallelism >= -1);
    Contract.Requires(maxDegreeOfParallelism > 0 || debug);

    m_mutables = new ConcurrentDenseIndex<MutablePipState>(debug);
    m_store = new PageablePipStore(pathTable, symbolTable, initialBufferSize, debug);
    m_serializationScheduler = new PipTableSerializationScheduler(maxDegreeOfParallelism, debug, ProcessQueueItem);

    // Placeholder pip used wherever a HashSourceFile pip is needed.
    AbsolutePath dummyFilePath = AbsolutePath.Create(pathTable, PathGeneratorUtilities.GetAbsolutePath("B", "DUMMY_HASH_SOURCE_FILE"));
    m_dummyHashSourceFilePip = new HashSourceFile(FileArtifact.CreateSourceFile(dummyFilePath));
}
/// <summary>
/// Writes a dense range of values, reads them back, and verifies the number of
/// allocated buffers matches the expected ceiling division.
/// </summary>
public void Linear()
{
    const int Count = 160000;
    var denseIndex = new ConcurrentDenseIndex<int>(true);

    for (int value = 0; value < Count; value++)
    {
        denseIndex[(uint)value] = value;
    }

    for (int value = 0; value < Count; value++)
    {
        XAssert.AreEqual(value, denseIndex[(uint)value]);
    }

    // Ceiling division: the index should allocate just enough buffers for Count entries.
    int expectedBuffers = (Count + ConcurrentDenseIndex<int>.DefaultBuffersCount - 1) / ConcurrentDenseIndex<int>.DefaultBuffersCount;
    XAssert.AreEqual(expectedBuffers, denseIndex.BuffersCount);
}
/// <summary>
/// Writes values at pseudo-random sparse indices and verifies them by replaying
/// the same deterministic index sequence.
/// </summary>
public void RandomAccess()
{
    var denseIndex = new ConcurrentDenseIndex<int>(true);

    // Seeded RNG so the exact same slot sequence can be replayed for reads.
    var rng = new Random(0);
    for (int i = 0; i < 100; i++)
    {
        uint slot = unchecked((uint)rng.Next(int.MinValue, int.MaxValue));
        denseIndex[slot] = i;
    }

    rng = new Random(0);
    for (int i = 0; i < 100; i++)
    {
        uint slot = unchecked((uint)rng.Next(int.MinValue, int.MaxValue));
        XAssert.AreEqual(i, denseIndex[slot]);
    }
}
/// <summary>
/// Exercises concurrent writes and reads at pseudo-random sparse indices:
/// all writer tasks are released simultaneously by a gate, then the same
/// seeded index sequence is replayed by concurrent readers.
/// </summary>
public async Task RandomAccessMultithreadedAsync()
{
    var denseIndex = new ConcurrentDenseIndex<int>(true);
    var rng = new Random(0);
    var tasks = new Task[100];
    var gate = new ManualResetEvent(false);

    // Phase 1: spawn gated writers, one slot/value pair per task.
    for (int i = 0; i < tasks.Length; i++)
    {
        uint slot = unchecked((uint)rng.Next(int.MinValue, int.MaxValue));
        int expected = i;
        tasks[expected] = Task.Run(
            () =>
            {
                gate.WaitOne();
                denseIndex[slot] = expected;
            });
    }

    // Release all writers at once, then wait for each to finish.
    gate.Set();
    foreach (Task t in tasks)
    {
        await t;
    }

    // Phase 2: replay the same seeded slot sequence with concurrent readers
    // (the gate is already set, so readers proceed immediately).
    rng = new Random(0);
    for (int i = 0; i < tasks.Length; i++)
    {
        uint slot = unchecked((uint)rng.Next(int.MinValue, int.MaxValue));
        int expected = i;
        tasks[expected] = Task.Run(
            () =>
            {
                gate.WaitOne();
                XAssert.AreEqual(denseIndex[slot], expected);
            });
    }

    foreach (Task t in tasks)
    {
        await t;
    }
}
/// <summary>
/// Reads back the serialized state of a store: page streams, item locations,
/// and the last assigned id.
/// </summary>
/// <param name="reader">Reader positioned at the start of the serialized state.</param>
/// <returns>The deserialized state bundle.</returns>
protected static SerializedState ReadSerializedState(BuildXLReader reader)
{
    Contract.RequiresNotNull(reader);

    bool debug = reader.ReadBoolean();

    // Stream lengths are written first, then the stream payloads in the same order.
    int streamCount = reader.ReadInt32();
    var pageStreamLengths = new int[streamCount];
    for (int i = 0; i < streamCount; i++)
    {
        pageStreamLengths[i] = reader.ReadInt32();
    }

    var streams = new List<PageStreamBase>(streamCount);
    for (int i = 0; i < streamCount; i++)
    {
        streams.Add(MemoryPageStream.Deserialize(reader, pageStreamLengths[i]));
    }

    Contract.Assert(streams.Count == streamCount);

    // Item locations follow: one (stream, offset) pair per id, ids are 1-based.
    int lastId = reader.ReadInt32();
    var offsets = new ConcurrentDenseIndex<ItemLocation>(debug);
    for (uint i = 0; i < lastId; i++)
    {
        var id = new PageableStoreId(i + 1);
        int streamIdentifier = reader.ReadInt32();
        int offset = reader.ReadInt32();
        offsets[id.Value] = new ItemLocation(streams[streamIdentifier], offset);
    }

    return new SerializedState()
    {
        Debug = debug,
        PageStreams = streams,
        ItemLocations = offsets,
        LastId = lastId,
    };
}
/// <summary>
/// Attempts to compute how many pips actually ran concurrently (max) during the actual build.
/// </summary>
public void ComputeActualConcurrency()
{
    List<PipSpan> spans = new List<PipSpan>();

    LongestRunningPips = GetSortedPips(20, true, n => true, n => Durations[n]);
    ShortestRunningProcesses = GetSortedPips(20, false, n => GetPipType(n) == PipType.Process, n => Durations[n]);

    foreach (var node in DirectedGraph.Nodes)
    {
        var pipType = GetPipType(node);
        Interlocked.Increment(ref PipTypeCounts[(int)pipType]);
        var startTime = StartTimes[node];
        if (pipType == PipType.Process)
        {
            var duration = Durations[node];
            if (duration > 0)
            {
                // BUGFIX: replaced a leftover Debugger.Launch() debugging artifact
                // with a clamp. A start time earlier than MinStartTime indicates
                // inconsistent event data and previously underflowed the unsigned
                // subtraction below.
                var relativeStart = startTime >= MinStartTime ? startTime - MinStartTime : 0;
                spans.Add(new PipSpan()
                {
                    Id = node,
                    StartTime = relativeStart,
                    Duration = duration,
                });
            }
        }
    }

    // Sort by start time (then duration) so the overlap scan below can stop
    // at the first span starting at/after the current one's end.
    spans.Sort(new ComparerBuilder<PipSpan>().CompareByAfter(s => s.StartTime).CompareByAfter(s => s.Duration));
    Spans = spans;

    ConcurrentDenseIndex<int> concurrencyIndex = new ConcurrentDenseIndex<int>(false);
    ConcurrentDenseIndex<int> concurrencyCount = new ConcurrentDenseIndex<int>(false);
    for (int i = 0; i < spans.Count; i++)
    {
        PipSpan s = spans[i];

        // BUGFIX: endTime was computed but never used; the inner loop re-read
        // s.EndTime on every iteration.
        ulong endTime = s.EndTime;
        for (int j = i; j < spans.Count; j++)
        {
            PipSpan t = spans[j];
            if (t.StartTime < endTime)
            {
                int value = concurrencyIndex[(uint)j] + 1;
                concurrencyIndex[(uint)j] = value;
                Max(ref ActualConcurrency, value);
            }
            else
            {
                break;
            }
        }

        var c = concurrencyIndex[(uint)i];
        concurrencyCount[(uint)c] = concurrencyCount[(uint)c] + 1;
    }
}
/// <summary>
/// Renders a per-time-slice concurrency chart for the given pip spans to a PNG file.
/// </summary>
/// <param name="path">Output PNG path.</param>
/// <param name="spans">Pip spans (relative start time + duration).</param>
/// <param name="totalTime">Total wall-clock time covered by the spans.</param>
/// <param name="threadCount">Maximum concurrency, used to scale the Y axis.</param>
private static void CreateSpanImage(string path, List<PipSpan> spans, ulong totalTime, int threadCount)
{
    const int width = 1000;
    const int height = 600;
    const int bottomY = 700;

    // BUGFIX: guard against a zero slice width (totalTime < width), which
    // previously caused a divide-by-zero in the slice computations below.
    ulong timeInterval = totalTime / (ulong)width;
    if (timeInterval == 0)
    {
        timeInterval = 1;
    }

    // Accumulate each span's fractional concurrency contribution per time slice
    // (one slice per horizontal pixel).
    ConcurrentDenseIndex<double> timeSliceConcurrencies = new ConcurrentDenseIndex<double>(false);
    for (int i = 0; i < spans.Count; i++)
    {
        PipSpan s = spans[i];
        uint firstTimeSlice = (uint)(s.StartTime / timeInterval);
        uint lastTimeSlice = (uint)(s.EndTime / timeInterval);
        ((uint)width).Min(ref lastTimeSlice);

        for (uint sliceIndex = firstTimeSlice; sliceIndex <= lastTimeSlice; sliceIndex++)
        {
            // Clip the span to the slice boundaries, then add its coverage fraction.
            ulong sliceStart = sliceIndex * timeInterval;
            ulong sliceEnd = sliceStart + timeInterval;
            s.StartTime.Max(ref sliceStart);
            s.EndTime.Min(ref sliceEnd);
            double concurrency = ((double)(sliceEnd - sliceStart)) / (double)timeInterval;
            timeSliceConcurrencies[sliceIndex] = timeSliceConcurrencies[sliceIndex] + concurrency;
        }
    }

    // BUGFIX: Bitmap, Graphics, and Pen are IDisposable and were never disposed.
    // Also removed dead locals (concurrencyIndex, min/maxConcurrencies,
    // first/lastFullTimeSlice, lastConcurrency, currentTime).
    using (Bitmap bitmap = new Bitmap(1200, 800))
    using (Graphics graphics = Graphics.FromImage(bitmap))
    using (Pen pen = new Pen(Brushes.Green, 1))
    {
        graphics.FillRectangle(Brushes.Gray, 100, 100, width, height);

        // One vertical bar per pixel, height proportional to slice concurrency.
        for (int i = 0; i < width; i++)
        {
            int x = 100 + i;
            Point bottom = new Point(x, bottomY);
            int concurrencyHeight = (int)(timeSliceConcurrencies[(uint)i] * height / threadCount);
            var top = bottom;
            top.Offset(0, -concurrencyHeight);
            graphics.DrawLine(pen, top, bottom);
        }

        bitmap.Save(path, ImageFormat.Png);
    }
}
/// <summary>
/// Creates a node-keyed dictionary backed by a dense index.
/// </summary>
/// <param name="debug">Enables debug checks in the underlying index.</param>
public ConcurrentNodeDictionary(bool debug)
{
    m_index = new ConcurrentDenseIndex<TValue>(debug: debug);
}