/// <summary>
/// Writer-thread entry point. Consumes serialized payloads from <c>ops.data</c> in
/// order, blocking on <c>ops.waitLock</c> until the producer signals each slot is
/// ready, then flushes the bytes to disk. Disposes the semaphore when done, so the
/// producer must not touch it after its final Release.
/// </summary>
/// <param name="data">Boxed <c>FileOperations</c> shared with the producer thread.</param>
static void Write(object data)
{
    var ops = (FileOperations)data;
    for (int index = 0; index < ops.data.Length; index++)
    {
        // Semaphore handshake: wait until the producer has populated ops.data[index].
        ops.waitLock.WaitOne();
        var op = ops.data[index];
        if (op.bytes != null && op.bytes.Length > 0)
        {
            try
            {
                Directory.CreateDirectory(Path.GetDirectoryName(op.file));
                // BUGFIX: the previous code wrote op.bytes.GetBuffer(), which is the
                // stream's full internal capacity — typically larger than the payload —
                // padding every cache file with trailing zero bytes. WriteTo flushes
                // exactly [0, Length) regardless of the stream's current Position.
                using (var fileStream = File.Create(op.file))
                    op.bytes.WriteTo(fileStream);
            }
            catch (Exception e)
            {
                BuildLogger.LogException(e);
            }
        }
    }
    ((IDisposable)ops.waitLock).Dispose();
}
/// <summary>
/// Reader-thread entry point. Loads each file named in <c>ops.data</c> into a
/// read-only <see cref="MemoryStream"/>, releasing <c>ops.waitLock</c> once per slot
/// so the consumer can deserialize entries as they become available. Missing files
/// leave the slot's bytes null; errors are logged and the slot is still signaled.
/// </summary>
/// <param name="data">Boxed <c>FileOperations</c> shared with the consumer thread.</param>
static void Read(object data)
{
    var ops = (FileOperations)data;
    int total = ops.data.Length;
    for (int i = 0; i < total; i++)
    {
        try
        {
            var operation = ops.data[i];
            if (File.Exists(operation.file))
            {
                byte[] contents = File.ReadAllBytes(operation.file);
                if (contents.Length > 0)
                    operation.bytes = new MemoryStream(contents, false);
            }
            ops.data[i] = operation;
        }
        catch (Exception e)
        {
            BuildLogger.LogException(e);
        }
        finally
        {
            // Signal the consumer that slot i is final (populated or not).
            ops.waitLock.Release();
        }
    }
}
/// <inheritdoc />
public void SaveCachedData(IList<CachedInfo> infos)
{
    if (infos == null || infos.Count == 0)
        return;

    using (m_Logger.ScopedStep(LogLevel.Info, $"SaveCachedData"))
    {
        m_Logger.AddEntrySafe(LogLevel.Info, $"Saving {infos.Count} infos");

        // Setup Operations: pre-resolve the destination path for every info so the
        // writer thread only has to flush bytes.
        var ops = new FileOperations(infos.Count);
        using (m_Logger.ScopedStep(LogLevel.Info, "SetupOperations"))
        {
            for (int i = 0; i < infos.Count; i++)
            {
                var op = ops.data[i];
                op.file = GetCachedInfoFile(infos[i].Asset);
                ops.data[i] = op;
            }
        }

        // Wait for any previous save to finish before starting a new writer thread
        // (presumably SyncPendingSaves joins m_ActiveWriteThread — confirm).
        SyncPendingSaves();
        m_ActiveWriteThread = new Thread(Write);
        m_ActiveWriteThread.Start(ops);

        using (m_Logger.ScopedStep(LogLevel.Info, "SerializingCacheInfos"))
        {
            // Serialize on this thread while the writer flushes completed entries.
            // The ops.waitLock.Release() in the loop increment runs after each body
            // (even on a caught exception), signaling the writer that slot 'index'
            // is final — the writer skips slots whose bytes stayed null.
            var formatter = new BinaryFormatter();
            for (int index = 0; index < infos.Count; index++, ops.waitLock.Release())
            {
                try
                {
                    var op = ops.data[index];
                    var stream = new MemoryStream();
                    formatter.Serialize(stream, infos[index]);
                    if (stream.Length > 0)
                    {
                        op.bytes = stream;
                        ops.data[index] = op;
                        // If we have a cache server connection, upload the cached data.
                        // BUGFIX: wrap only the valid [0, Length) window of the buffer —
                        // GetBuffer() alone exposes the full capacity, so the uploader
                        // would previously see trailing padding past the payload.
                        if (m_Uploader != null)
                            m_Uploader.QueueUpload(infos[index].Asset, GetCachedArtifactsDirectory(infos[index].Asset),
                                new MemoryStream(stream.GetBuffer(), 0, (int)stream.Length, false));
                    }
                }
                catch (Exception e)
                {
                    BuildLogger.LogException(e);
                }
            }
        }
    }
}
/// <inheritdoc />
public void LoadCachedData(IList<CacheEntry> entries, out IList<CachedInfo> cachedInfos)
{
    // Guard clauses: null input yields null, empty input yields an empty list.
    if (entries == null)
    {
        cachedInfos = null;
        return;
    }
    if (entries.Count == 0)
    {
        cachedInfos = new List<CachedInfo>();
        return;
    }

    int count = entries.Count;

    // Set up one pending file operation per entry, pointing at its cached info file.
    var ops = new FileOperations(count);
    for (int i = 0; i < count; i++)
    {
        var op = ops.data[i];
        op.file = GetCachedInfoFile(entries[i]);
        ops.data[i] = op;
    }

    // Kick off the background reader; it releases ops.waitLock once per slot as
    // each file finishes loading.
    var readerThread = new Thread(Read);
    readerThread.Start(ops);

    var results = new List<CachedInfo>(count);

    // Deserialize entries in order as the reader completes them. A slot with null
    // or empty bytes (missing/empty file) produces a null info.
    var formatter = new BinaryFormatter();
    for (int i = 0; i < count; i++)
    {
        // Block until the reader has finalized slot i.
        ops.waitLock.WaitOne();
        CachedInfo info = null;
        try
        {
            var op = ops.data[i];
            if (op.bytes != null && op.bytes.Length > 0)
                info = formatter.Deserialize(op.bytes) as CachedInfo;
        }
        catch (Exception e)
        {
            BuildLogger.LogException(e);
        }
        results.Add(info);
    }
    readerThread.Join();
    ((IDisposable)ops.waitLock).Dispose();

    // Invalidate cached data whose asset or dependencies changed since it was saved.
    for (int i = 0; i < results.Count; i++)
    {
        if (HasAssetOrDependencyChanged(results[i]))
            results[i] = null;
    }

    cachedInfos = results;

    // If we have a cache server connection, download & check any missing info.
    if (m_Downloader != null)
        m_Downloader.DownloadMissing(entries, cachedInfos);

    Assert.AreEqual(entries.Count, cachedInfos.Count);
}
// We don't return from this function until all downloads are processed. So it is safe to dispose immediately after.
/// <summary>
/// Fetches cache-server data for every entry whose local <paramref name="cachedInfos"/>
/// slot is null, validates each download, and moves the usable ones into the local
/// cache, filling in the corresponding slot. Runs synchronously: all queued downloads
/// are consumed before this method returns.
/// </summary>
/// <param name="entries">Cache entries to resolve; must be the same length as <paramref name="cachedInfos"/>.</param>
/// <param name="cachedInfos">Parallel list of locally-loaded infos; null slots are the ones to download.</param>
public void DownloadMissing(IList<CacheEntry> entries, IList<CachedInfo> cachedInfos)
{
    Assert.AreEqual(entries.Count, cachedInfos.Count);
    // k_CachePath is the temporary download staging area; recreated per call and
    // deleted at the end.
    Directory.CreateDirectory(k_CachePath);
    // NOTE(review): assumes ThreadedDownloadFinished releases m_Semaphore once per
    // completed Info download (the consumer loop below waits once per missing entry)
    // — confirm against the handler implementation.
    m_Semaphore = new Semaphore(0, entries.Count);
    m_Client.DownloadFinished += ThreadedDownloadFinished;
    // Queue up downloads for the missing or invalid local data
    for (var index = 0; index < entries.Count; index++)
    {
        // Only download data for cachedInfos that are invalid
        if (cachedInfos[index] != null)
            continue;
        var entry = entries[index];
        // Server-side file id combines the entry hash with the global (pipeline
        // version) hash so incompatible pipeline versions never share artifacts.
        string finalHash = HashingMethods.Calculate(entry.Hash, m_GlobalHash).ToHash128().ToString();
        var fileId = FileId.From(entry.Guid.ToString(), finalHash);
        // Download artifacts before info to ensure both are available when download for info returns
        var downloadArtifact = new FileDownloadItem(fileId, FileType.Resource, GetCachedArtifactsFile(entry));
        m_Client.QueueDownload(downloadArtifact);
        var downloadInfo = new FileDownloadItem(fileId, FileType.Info, GetCachedInfoFile(entry));
        m_Client.QueueDownload(downloadInfo);
    }
    // Check downloads to see if it is usable data
    var formatter = new BinaryFormatter();
    for (var index = 0; index < entries.Count; index++)
    {
        // find the next invalid cachedInfo
        while (index < entries.Count && cachedInfos[index] != null)
            index++;
        // make sure we didn't go out of bounds looking for invalid entries
        if (index >= entries.Count)
            break;
        // Wait for info download
        m_Semaphore.WaitOne();
        // Any step below that fails just skips this entry, leaving its slot null so
        // the caller treats it as a cache miss.
        string tempInfoFile = GetCachedInfoFile(entries[index]);
        if (!File.Exists(tempInfoFile))
            continue;
        try
        {
            CachedInfo info;
            using (var fileStream = new FileStream(tempInfoFile, FileMode.Open, FileAccess.Read))
                info = formatter.Deserialize(fileStream) as CachedInfo;
            // Discard downloads that are already stale against the local project state.
            if (m_Cache.HasAssetOrDependencyChanged(info))
                continue;
            // Not every info file will have artifacts. So just check to see if we downloaded something.
            // TODO: May want to extend CachedInfo with Artifact knowledge if there is a performance benefit?
            string tempArtifactFile = GetCachedArtifactsFile(entries[index]);
            // Decompress next to the archive; ChangeExtension("") strips the extension
            // (leaving a trailing '.') to name the extraction directory.
            string tempArtifactDir = Path.ChangeExtension(tempArtifactFile, "");
            if (File.Exists(tempArtifactFile) && !FileCompressor.Decompress(tempArtifactFile, tempArtifactDir))
                continue;
            // All valid, move downloaded data into place
            cachedInfos[index] = info;
            string targetInfoFile = m_Cache.GetCachedInfoFile(info.Asset);
            if (File.Exists(targetInfoFile))
                File.Delete(targetInfoFile);
            else
                Directory.CreateDirectory(Path.GetDirectoryName(targetInfoFile));
            File.Move(tempInfoFile, targetInfoFile);
            if (Directory.Exists(tempArtifactDir))
            {
                // Replace the local artifacts wholesale with the downloaded set.
                string targetArtifactDir = m_Cache.GetCachedArtifactsDirectory(info.Asset);
                if (Directory.Exists(targetArtifactDir))
                    Directory.Delete(targetArtifactDir, true);
                Directory.Move(tempArtifactDir, targetArtifactDir);
            }
        }
        catch (Exception e)
        {
            BuildLogger.LogException(e);
        }
    }
    // Detach the handler and tear down the per-call semaphore and staging directory.
    m_Client.ResetDownloadFinishedEventHandler();
    ((IDisposable)m_Semaphore).Dispose();
    m_Semaphore = null;
    Directory.Delete(k_CachePath, true);
}
/// <inheritdoc />
public void LoadCachedData(IList<CacheEntry> entries, out IList<CachedInfo> cachedInfos)
{
    if (entries == null)
    {
        cachedInfos = null;
        return;
    }
    if (entries.Count == 0)
    {
        cachedInfos = new List<CachedInfo>();
        return;
    }

    using (m_Logger.ScopedStep(LogLevel.Info, "LoadCachedData"))
    {
        m_Logger.AddEntrySafe(LogLevel.Info, $"{entries.Count} items");

        // Setup Operations: one pending file read per entry.
        var ops = new FileOperations(entries.Count);
        using (m_Logger.ScopedStep(LogLevel.Info, "GetCachedInfoFile"))
        {
            for (int i = 0; i < entries.Count; i++)
            {
                var op = ops.data[i];
                op.file = GetCachedInfoFile(entries[i]);
                ops.data[i] = op;
            }
        }

        int cachedCount = 0;
        using (m_Logger.ScopedStep(LogLevel.Info, "Read and deserialize cache info"))
        {
            // Start file reading on a background thread; Read releases ops.waitLock
            // once per slot as each file finishes loading.
            Thread thread = new Thread(Read);
            thread.Start(ops);

            cachedInfos = new List<CachedInfo>(entries.Count);

            // Deserialize as files finish reading. The timer is paused while blocked
            // on the reader so it measures deserialization only, not file I/O.
            Stopwatch deserializeTimer = Stopwatch.StartNew();
            var formatter = new BinaryFormatter();
            for (int index = 0; index < entries.Count; index++)
            {
                // Fast path: WaitOne(0) succeeds without blocking when the slot is
                // already ready; otherwise stop the timer for the blocking wait.
                if (!ops.waitLock.WaitOne(0))
                {
                    deserializeTimer.Stop();
                    ops.waitLock.WaitOne();
                    deserializeTimer.Start();
                }

                CachedInfo info = null;
                try
                {
                    var op = ops.data[index];
                    if (op.bytes != null && op.bytes.Length > 0)
                    {
                        info = formatter.Deserialize(op.bytes) as CachedInfo;
                        cachedCount++;
                    }
                    else
                        LogCacheMiss($"[Cache Miss]: Missing cache entry. \nEntry: {entries[index]}");
                }
                catch (Exception e)
                {
                    BuildLogger.LogException(e);
                }
                cachedInfos.Add(info);
            }
            thread.Join();
            ((IDisposable)ops.waitLock).Dispose();
            deserializeTimer.Stop();
            m_Logger.AddEntrySafe(LogLevel.Info, $"Time spent deserializing: {deserializeTimer.ElapsedMilliseconds}ms");
            m_Logger.AddEntrySafe(LogLevel.Info, $"Local Cache hit count: {cachedCount}");
        }

        using (m_Logger.ScopedStep(LogLevel.Info, "Check for changed dependencies"))
        {
            // Invalidate cached data whose asset or dependencies changed since save.
            for (int i = 0; i < cachedInfos.Count; i++)
            {
                if (HasAssetOrDependencyChanged(cachedInfos[i]))
                    cachedInfos[i] = null;
            }
        }

        // If we have a cache server connection, download & check any missing info
        int downloadedCount = 0;
        if (m_Downloader != null)
        {
            using (m_Logger.ScopedStep(LogLevel.Info, "Download Missing Entries"))
            {
                // BUGFIX: snapshot the valid count immediately before downloading.
                // The previous code subtracted cachedCount — counted BEFORE the
                // dependency check above — so whenever that check invalidated local
                // hits, the reported server hit count was wrong (even negative).
                int validBeforeDownload = cachedInfos.Count(i => i != null);
                m_Downloader.DownloadMissing(entries, cachedInfos);
                downloadedCount = cachedInfos.Count(i => i != null) - validBeforeDownload;
            }
        }
        m_Logger.AddEntrySafe(LogLevel.Info, $"Local Cache hit count: {cachedCount}, Cache Server hit count: {downloadedCount}");

        Assert.AreEqual(entries.Count, cachedInfos.Count);
    }
}