/// <summary>
/// Verifies that a well-formed hex string parses successfully for the given
/// <paramref name="hashType"/> and round-trips back to the identical hex text.
/// </summary>
public void TryParseWithTypeSuccess(HashType hashType, string value)
{
    var parsed = ContentHash.TryParse(hashType, value, out var hash);
    Assert.True(parsed);
    Assert.Equal(value, hash.ToHex());
}
/// <summary>
/// Parses <paramref name="v"/> into a <see cref="ContentHash"/>, failing the test
/// if the string is not a valid content hash representation.
/// </summary>
private static ContentHash ParseContentHash(string v)
{
    var parsed = ContentHash.TryParse(v, out var hash);
    XAssert.IsTrue(parsed);
    return hash;
}
/// <summary>
/// Parses <paramref name="format"/> into a <see cref="FileContentInfo"/> object.
///
/// The value must correspond to the format defined by the <see cref="Render"/> method
/// or <see cref="ContractException"/> is thrown.
/// </summary>
public static FileContentInfo Parse(string format)
{
    Contract.Requires(format != null);

    // Render emits exactly "<hash><separator><length>", so exactly two parts are expected.
    var parts = format.Split(new[] { RenderSeparator }, StringSplitOptions.None);
    if (parts.Length != 2)
    {
        throw Contract.AssertFailure(I($"Invalid format: expected '{RenderSeparator}' to divide '{format}' into exactly 2 parts."));
    }

    if (!ContentHash.TryParse(parts[0], out var hash))
    {
        throw Contract.AssertFailure(I($"Invalid ContentHash format: '{parts[0]}'"));
    }

    if (!long.TryParse(parts[1], out var length))
    {
        throw Contract.AssertFailure(I($"Invalid file length format: '{parts[1]}'"));
    }

    return new FileContentInfo(hash, length);
}
/// <summary>
/// Copies a file from a remote CAS machine to a local destination path, retrying on
/// <see cref="ClientCanRetryException"/>. Blocks until the copy completes so the calling
/// application does not exit before the method returns.
/// </summary>
internal void CopyFile(
    [Required, Description("Machine to copy from")] string host,
    [Required, Description("Expected content hash")] string hashString,
    [Required, Description("Path to destination file")] string destinationPath,
    [Description("Whether or not GZip is enabled"), DefaultValue(false)] bool useCompressionForCopies,
    [Description("File name where the GRPC port can be found when using cache service. 'CASaaS GRPC port' if not specified")] string grpcPortFileName,
    [Description("The GRPC port"), DefaultValue(0)] int grpcPort)
{
    Initialize();

    var context = new Context(_logger);
    var retryPolicy = RetryPolicyFactory.GetLinearPolicy(
        ex => ex is ClientCanRetryException,
        (int)_retryCount,
        TimeSpan.FromSeconds(_retryIntervalSeconds));

    // A zero port means "look the port up from the well-known port file".
    if (grpcPort == 0)
    {
        grpcPort = Helpers.GetGrpcPortFromFile(_logger, grpcPortFileName);
    }

    if (!ContentHash.TryParse(hashString, out var hash))
    {
        throw new CacheException($"Invalid content hash string provided: {hashString}");
    }

    try
    {
        var config = new GrpcCopyClientConfiguration();
        using var clientCache = new GrpcCopyClientCache(
            context,
            new GrpcCopyClientCacheConfiguration() { GrpcCopyClientConfiguration = config });

        var destination = new AbsolutePath(destinationPath);

        // Block on the async copy deliberately so the process stays alive until it finishes.
        var copyResult = clientCache.UseAsync(
            new OperationContext(context),
            host,
            grpcPort,
            (nestedContext, rpcClient) => retryPolicy.ExecuteAsync(
                () => rpcClient.CopyFileAsync(
                    nestedContext,
                    hash,
                    destination,
                    new CopyOptions(bandwidthConfiguration: null)
                    {
                        CompressionHint = useCompressionForCopies ? CopyCompression.Gzip : CopyCompression.None,
                    }),
                _cancellationToken)).GetAwaiter().GetResult();

        if (!copyResult.Succeeded)
        {
            throw new CacheException(copyResult.ErrorMessage);
        }

        _logger.Debug($"Copy of {hashString} to {destination} was successful");
    }
    catch (Exception ex)
    {
        // Surface any failure uniformly as a CacheException for the CLI caller.
        throw new CacheException(ex.ToString());
    }
}
/// <summary>
/// Copies a file from a remote CAS machine to a local destination path with optional
/// bandwidth checking, retrying transient failures. Blocks until the copy completes so
/// the calling application does not exit before the method returns.
/// </summary>
internal void CopyFile(
    [Required, Description("Machine to copy from")] string host,
    [Required, Description("Expected content hash")] string hashString,
    [Required, Description("Path to destination file")] string destinationPath,
    [Description("Whether or not GZip is enabled"), DefaultValue(false)] bool useCompressionForCopies,
    [Description("File name where the GRPC port can be found when using cache service. 'CASaaS GRPC port' if not specified")] string grpcPortFileName,
    [Description("The GRPC port"), DefaultValue(0)] int grpcPort,
    [Description("Interval at which to check the bandwidth. 0 will disable bandwidth checks"), DefaultValue(0)] int bandwidthCheckIntervalSeconds,
    [Description("Minimum bandwidth required. Null will enable historical bandwidth check."), DefaultValue(null)] double? minimumBandwidthMbPerSec)
{
    Initialize();

    var context = new Context(_logger);
    var retryPolicy = new RetryPolicy(
        new TransientErrorDetectionStrategy(),
        new FixedInterval("RetryInterval", (int)_retryCount, TimeSpan.FromSeconds(_retryIntervalSeconds), false));

    // A zero port means "look the port up from the well-known port file".
    if (grpcPort == 0)
    {
        grpcPort = Helpers.GetGrpcPortFromFile(_logger, grpcPortFileName);
    }

    if (!ContentHash.TryParse(hashString, out var hash))
    {
        throw new CacheException($"Invalid content hash string provided: {hashString}");
    }

    try
    {
        // A positive interval enables bandwidth checks; otherwise use the default configuration.
        GrpcCopyClient.Configuration copyClientConfig;
        if (bandwidthCheckIntervalSeconds > 0)
        {
            copyClientConfig = new GrpcCopyClient.Configuration(
                TimeSpan.FromSeconds(bandwidthCheckIntervalSeconds),
                minimumBandwidthMbPerSec,
                clientBufferSize: null);
        }
        else
        {
            copyClientConfig = GrpcCopyClient.Configuration.Default;
        }

        using (var clientCache = new GrpcCopyClientCache(context, copyClientConfig))
        using (var clientWrapper = clientCache.CreateAsync(host, grpcPort, useCompressionForCopies).GetAwaiter().GetResult())
        {
            var client = clientWrapper.Value;
            var destination = new AbsolutePath(destinationPath);

            // This action is synchronous to make sure the calling application doesn't exit before the method returns.
            var copyResult = retryPolicy.ExecuteAsync(() => client.CopyFileAsync(context, hash, destination, CancellationToken.None)).Result;
            if (!copyResult.Succeeded)
            {
                throw new CacheException(copyResult.ErrorMessage);
            }

            _logger.Debug($"Copy of {hashString} to {destination} was successful");
        }
    }
    catch (Exception ex)
    {
        // Surface any failure uniformly as a CacheException for the CLI caller.
        throw new CacheException(ex.ToString());
    }
}
/// <summary>
/// Copies a file from a remote CAS machine to a local destination path using a dedicated
/// gRPC copy client, retrying transient failures and shutting the client down afterwards.
/// Blocks until the copy completes so the calling application does not exit early.
/// </summary>
internal void CopyFile(
    [Required, Description("Machine to copy from")] string host,
    [Required, Description("Expected content hash")] string hashString,
    [Required, Description("Path to destination file")] string destinationPath,
    [Description("Whether or not GZip is enabled"), DefaultValue(false)] bool useCompressionForCopies,
    [Description("File name where the GRPC port can be found when using cache service. 'CASaaS GRPC port' if not specified")] string grpcPortFileName,
    [Description("The GRPC port"), DefaultValue(0)] int grpcPort)
{
    Initialize();

    var context = new Context(_logger);
    var retryPolicy = new RetryPolicy(
        new TransientErrorDetectionStrategy(),
        new FixedInterval("RetryInterval", (int)_retryCount, TimeSpan.FromSeconds(_retryIntervalSeconds), false));

    // A zero port means "look the port up from the well-known port file".
    if (grpcPort == 0)
    {
        grpcPort = Helpers.GetGrpcPortFromFile(_logger, grpcPortFileName);
    }

    if (!ContentHash.TryParse(hashString, out var hash))
    {
        throw new CacheException($"Invalid content hash string provided: {hashString}");
    }

    try
    {
        using (var rpcClient = GrpcCopyClient.Create(host, grpcPort, useCompressionForCopies))
        {
            var destination = new AbsolutePath(destinationPath);

            // This action is synchronous to make sure the calling application doesn't exit before the method returns.
            var copyResult = retryPolicy.ExecuteAsync(() => rpcClient.CopyFileAsync(context, hash, destination, CancellationToken.None)).Result;
            if (!copyResult.Succeeded)
            {
                throw new CacheException(copyResult.ErrorMessage);
            }

            _logger.Debug($"Copy of {hashString} to {destination} was successful");

            // Gracefully shut the client down; a failed shutdown is also surfaced as an error.
            var shutdownResult = rpcClient.ShutdownAsync(context).Result;
            if (!shutdownResult.Succeeded)
            {
                throw new CacheException(shutdownResult.ErrorMessage);
            }
        }
    }
    catch (Exception ex)
    {
        // Surface any failure uniformly as a CacheException for the CLI caller.
        throw new CacheException(ex.ToString());
    }
}
/// <nodoc />
public DownloaderArgs(string[] args)
    : base(args)
{
    foreach (Option opt in Options)
    {
        if (opt.Name.Equals("url", StringComparison.OrdinalIgnoreCase))
        {
            if (!Uri.TryCreate(opt.Value, UriKind.Absolute, out var url))
            {
                throw Error($"Invalid URI: {opt.Value}");
            }

            Url = url;
        }
        else if (opt.Name.Equals("fileName", StringComparison.OrdinalIgnoreCase))
        {
            FileName = opt.Value;
        }
        else if (opt.Name.Equals("hash", StringComparison.OrdinalIgnoreCase))
        {
            if (!ContentHash.TryParse(opt.Value, out var hash))
            {
                throw Error($"Invalid hash value '{opt.Value}' for '{Url}'. It must be a valid content hash format i.e. 'VSO0:000000000000000000000000000000000000000000000000000000000000000000'.");
            }

            Hash = hash;
        }
        else if (opt.Name.Equals("downloadDirectory", StringComparison.OrdinalIgnoreCase))
        {
            DownloadDirectory = opt.Value;
        }
        else
        {
            throw Error($"Unsupported option: {opt.Name}.");
        }
    }

    // Url and DownloadDirectory are mandatory; FileName defaults to the last URL path segment.
    if (Url == null)
    {
        throw Error($"Missing mandatory argument 'Url'");
    }

    if (string.IsNullOrWhiteSpace(DownloadDirectory))
    {
        throw Error($"Missing mandatory argument 'DownloadDirectory'");
    }

    if (string.IsNullOrEmpty(FileName))
    {
        FileName = Path.GetFileName(Url.AbsolutePath.TrimEnd(new[] { '/', '\\' }));
    }
}
/// <summary>
/// Validates the download settings and, when valid, packages them into a
/// <see cref="DownloadData"/> instance. Logs a targeted error and returns false otherwise.
/// </summary>
private bool ValidateAndExtractDownloadData(
    FrontEndContext context,
    IDownloadFileSettings downloadSettings,
    Dictionary<string, DownloadData> downloads,
    AbsolutePath resolverFolder,
    out DownloadData downloadData)
{
    downloadData = null;

    if (string.IsNullOrEmpty(downloadSettings.ModuleName))
    {
        Logger.Log.DownloadFrontendMissingModuleId(m_context.LoggingContext, downloadSettings.Url);
        return false;
    }

    if (downloads.ContainsKey(downloadSettings.ModuleName))
    {
        Logger.Log.DownloadFrontendDuplicateModuleId(m_context.LoggingContext, downloadSettings.ModuleName, Kind, Name);
        return false;
    }

    if (string.IsNullOrEmpty(downloadSettings.Url))
    {
        Logger.Log.DownloadFrontendMissingUrl(m_context.LoggingContext, downloadSettings.ModuleName);
        return false;
    }

    if (!Uri.TryCreate(downloadSettings.Url, UriKind.Absolute, out var downloadLocation))
    {
        Logger.Log.DownloadFrontendInvalidUrl(m_context.LoggingContext, downloadSettings.ModuleName, downloadSettings.Url);
        return false;
    }

    // The hash is optional; when present it must be a well-formed content hash.
    ContentHash? contentHash = null;
    if (!string.IsNullOrEmpty(downloadSettings.Hash))
    {
        if (!ContentHash.TryParse(downloadSettings.Hash, out var hash))
        {
            Logger.Log.DownloadFrontendHashValueNotValidContentHash(m_context.LoggingContext, downloadSettings.ModuleName, downloadSettings.Url, downloadSettings.Hash);
            return false;
        }

        contentHash = hash;
    }

    downloadData = new DownloadData(context, downloadSettings, downloadLocation, resolverFolder, contentHash);
    return true;
}
/// <summary>
/// Deserializes a <see cref="Selector"/> from its Redis string representation: a content
/// hash optionally followed by hex-encoded output bytes, joined by <see cref="RedisValueSeparator"/>.
/// </summary>
/// <remarks>
/// An unparsable hash portion yields a default <see cref="ContentHash"/> rather than throwing,
/// preserving the existing lenient handling of malformed values.
/// </remarks>
private static Selector ToSelector(RedisValue value)
{
    var parts = value.ToString().Split(new[] { RedisValueSeparator }, 2, StringSplitOptions.RemoveEmptyEntries);

    // Guard against an empty/separator-only value: Split with RemoveEmptyEntries can return a
    // zero-length array, which previously caused an IndexOutOfRangeException on parts[0].
    ContentHash contentHash;
    if (parts.Length == 0 || !ContentHash.TryParse(parts[0], out contentHash))
    {
        contentHash = default(ContentHash);
    }

    // Anything after the separator is the selector output, stored as hex.
    byte[] output = parts.Length > 1 ? HexUtilities.HexToBytes(parts[1]) : null;

    return new Selector(contentHash, output);
}
/// <summary>
/// Parses a comma-separated list of short-hash prefixes into <see cref="ShortHash"/> values.
/// </summary>
public IReadOnlyList<ShortHash> ParseHashes(string hashesString)
{
    var result = new List<ShortHash>();

    foreach (var hashString in hashesString.Split(new[] { ", " }, StringSplitOptions.None))
    {
        // Pad the short prefix with a fixed suffix so ContentHash.TryParse accepts a
        // full-length hash; only the leading bytes survive the ShortHash conversion.
        var parsed = ContentHash.TryParse(hashString + "7EDA1A01E8C646750D9C2F9B426335A047710D556D2D00", out var hash);
        Contract.Assert(parsed);
        result.Add(new ShortHash(hash));
    }

    return result;
}
/// <summary>
/// Verifies incremental download behavior: repeating a download with the same hash is
/// skipped via the manifest (no extra web request), while a changed hash forces a
/// re-download of the updated server content.
/// </summary>
public Task SameHashWillBeIncrementalAndDifferentHashWillReDownload()
{
    // Known VSO hashes of the two server payloads ("Hello World" / "Hello Galaxy").
    ContentHash.TryParse("VSO0:E87891E21CD24671B953BAB6A2D6F9C91049C67F9EA0E5015620C3DBC766EDC500", out var helloWorldHash);
    ContentHash.TryParse("VSO0:1D6240B6C13AC7B412F81EF6BF26A529C8D9B6BF3EC6D3F9E5305EB922F050F700", out var helloGalaxyHash);

    var data = GetSampleData(TestServer + "file.txt", DownloadArchiveType.File, helloWorldHash);
    var downloadedFile = data.DownloadedFilePath.ToString(FrontEndContext.PathTable);

    return (TestDownloadResolver(
        data,
        async resolver =>
        {
            // First download: hits the web server, writes the file; no manifest exists yet.
            await resolver.PerformDownloadOrIncrementalCheckAsync(data);
            Assert.Equal(1, resolver.Statistics.Downloads.Total.Count);
            Assert.Equal(0, resolver.Statistics.Downloads.SkippedDueToManifest.Count);
            Assert.Equal(0, resolver.Statistics.Downloads.Failures.Count);
            Assert.Equal(1, m_webRequestCount);
            Assert.True(File.Exists(downloadedFile));
            Assert.Equal("Hello World", File.ReadAllText(downloadedFile));
            Assert.False(File.Exists(data.DownloadManifestFile.ToString(FrontEndContext.PathTable)));

            // Same hash again: skipped due to the manifest — the request count stays at 1.
            await resolver.PerformDownloadOrIncrementalCheckAsync(data);
            Assert.Equal(2, resolver.Statistics.Downloads.Total.Count);
            Assert.Equal(1, resolver.Statistics.Downloads.SkippedDueToManifest.Count);
            Assert.Equal(0, resolver.Statistics.Downloads.Failures.Count);
            Assert.Equal(1, m_webRequestCount);

            // Force update to server
            m_useGalaxyInsteadOfWorldFromServer = true;
            var newData = GetSampleData(TestServer + "file.txt", DownloadArchiveType.File, helloGalaxyHash);

            // Different hash: must re-download and observe the updated contents (request count 2).
            await resolver.PerformDownloadOrIncrementalCheckAsync(newData);
            Assert.True(File.Exists(downloadedFile));
            Assert.Equal("Hello Galaxy", File.ReadAllText(downloadedFile));
            Assert.Equal(3, resolver.Statistics.Downloads.Total.Count);
            Assert.Equal(1, resolver.Statistics.Downloads.SkippedDueToManifest.Count);
            Assert.Equal(2, m_webRequestCount);

            // New hash again: skipped via the refreshed manifest, no extra web request.
            await resolver.PerformDownloadOrIncrementalCheckAsync(newData);
            Assert.Equal(4, resolver.Statistics.Downloads.Total.Count);
            Assert.Equal(2, resolver.Statistics.Downloads.SkippedDueToManifest.Count);
            Assert.Equal(2, m_webRequestCount);
        }));
}
/// <summary>
/// Try to parse strings generated by <see cref="ToString"/>
/// </summary>
public static bool TryParse(string val, out BuildManifestFileInfo fileInfo)
{
    fileInfo = null;

    // Layout: relativePath <sep> azureArtifactsHash <sep> manifestHash1 <sep> manifestHash2 ...
    var parts = val.Split(Separator);
    if (parts.Length < MinFieldCount)
    {
        return false;
    }

    var relativePath = parts[0];
    if (string.IsNullOrWhiteSpace(relativePath))
    {
        return false;
    }

    if (!ContentHash.TryParse(parts[1], out var azureArtifactsHash))
    {
        return false;
    }

    // Every remaining field is a manifest hash; any unparsable entry invalidates the whole string.
    var manifestHashes = new List<ContentHash>(parts.Length - 2);
    foreach (var serializedHash in parts.Skip(2))
    {
        if (!ContentHash.TryParse(serializedHash, out var manifestHash))
        {
            return false;
        }

        manifestHashes.Add(manifestHash);
    }

    fileInfo = new BuildManifestFileInfo(relativePath, azureArtifactsHash, manifestHashes);
    return true;
}
/// <inheritdoc />
public override object ReadJson(
    JsonReader reader,
    Type objectType,
    object existingValue,
    JsonSerializer serializer)
{
    var jsonObject = JObject.Load(reader);

    // The payload is optional and stored as a hex string.
    byte[] payload = null;
    if (jsonObject.TryGetValue(PayloadFieldName, StringComparison.Ordinal, out var payloadToken))
    {
        payload = HexUtilities.HexToBytes(payloadToken.Value<string>());
    }

    // A missing expiration means the determinism has already expired.
    var expirationUtc = jsonObject.TryGetValue(ExpirationUtcFieldName, StringComparison.Ordinal, out var expirationToken)
        ? DateTime.FromBinary(expirationToken.Value<long>())
        : CacheDeterminism.Expired;

    var determinism = CacheDeterminism.ViaCache(
        Guid.Parse(jsonObject.GetValue(DeterminismFieldName, StringComparison.Ordinal).Value<string>()),
        expirationUtc);

    // Each hash in the array must parse; a malformed entry makes the whole payload invalid.
    var contentHashes = new List<ContentHash>();
    foreach (string hashString in jsonObject.GetValue(HashesFieldName, StringComparison.Ordinal).Values<string>())
    {
        if (!ContentHash.TryParse(hashString, out var parsedHash))
        {
            throw new InvalidDataException("Unable to parse hash out of JSON Token");
        }

        contentHashes.Add(parsedHash);
    }

    var contentHashList = new ContentHashList(contentHashes.ToArray(), payload);
    return new ContentHashListWithDeterminism(contentHashList, determinism);
}
/// <summary>
/// Gets a selector object from the JSON reader.
/// </summary>
internal object GetSelectorFromJReader(JsonReader reader)
{
    var jsonObject = JObject.Load(reader);

    // The selector output is optional and stored as a hex string.
    byte[] output = null;
    if (jsonObject.TryGetValue(OutputFieldName, StringComparison.Ordinal, out var outputToken))
    {
        output = HexUtilities.HexToBytes(outputToken.Value<string>());
    }

    var hashString = jsonObject.GetValue(ContentHashFieldName, StringComparison.Ordinal).Value<string>();
    if (!ContentHash.TryParse(hashString, out var parsedHash))
    {
        throw new InvalidDataException("Unable to parse hash out of JSON Token");
    }

    return new Selector(parsedHash, output);
}
/// <summary>
/// Verifies that a malformed hash string is rejected when no hash type is specified.
/// </summary>
public void TryParseWithoutTypeFail(string value)
{
    var parsed = ContentHash.TryParse(value, out _);
    Assert.False(parsed);
}
/// <summary>
/// Lazily scans a cache log line by line and yields each distinct <see cref="StrongFingerprint"/>
/// exactly once, in order of first appearance. Lines that do not match the expected pattern, or
/// whose fingerprint/hash portions fail to parse, are skipped silently.
/// </summary>
/// <param name="reader">Reader over the log; fully consumed by the enumeration.</param>
private IEnumerable<StrongFingerprint> EnumerateUniqueStrongFingerprints(StreamReader reader)
{
    // Used as a set for de-duplication; the int value is never read.
    // NOTE(review): a plain HashSet would suffice for this single-threaded scan — confirm no concurrent use.
    ConcurrentDictionary<StrongFingerprint, int> uniqueStrongFingerprints = new ConcurrentDictionary<StrongFingerprint, int>();

    // Look for pattern: GetContentHashList(WeakFingerprint=[8033C0365DE491734D48A85A5709099B9B6A02D2],Selector=[ContentHash=[VSO0:000000000000000000000000000000000000000000000000000000000000000000], Output=[D697E34F2B7242DE55AFA03220E72DE2ED1D7DE0]]) start
    // Hits on this will also hit AddOrGetContentHashList:
    // AddOrGetContentHashList(WeakFingerprint=[AF00A265EB9B856129B5CBB41D5B7FE15D0CBC26],Selector=[ContentHash=[VSO0:000000000000000000000000000000000000000000000000000000000000000000], Output=[56552E044A46FA8AB1AC8660C62221A4BE8497C4]]) start
    const string strongFingerprintPattern = @"WeakFingerprint=\[(?<weakFingerprint>\w*)\],Selector=\[ContentHash=\[(?<selectorHash>[^\]]*)\], Output=\[(?<selectorOutput>\w*)\]\]";
    Regex sfpRegex = new Regex(strongFingerprintPattern, RegexOptions.IgnoreCase);

    string currentLine = reader.ReadLine();
    while (currentLine != null)
    {
        Match match = sfpRegex.Match(currentLine);
        if (match.Success && match.Groups["weakFingerprint"].Success && match.Groups["selectorHash"].Success && match.Groups["selectorOutput"].Success)
        {
            string weakFingerprintString = match.Groups["weakFingerprint"].Value;
            string selectorHashString = match.Groups["selectorHash"].Value;
            string selectorOutputString = match.Groups["selectorOutput"].Value;

            Fingerprint weakFingerprint;
            ContentHash selectorHash;
            byte[] selectorOutput = null;

            // Both the weak fingerprint and the selector hash must parse; otherwise the line is skipped.
            if (Fingerprint.TryParse(weakFingerprintString, out weakFingerprint) && ContentHash.TryParse(selectorHashString, out selectorHash))
            {
                // The selector output is optional; when present it is hex-decoded.
                if (!string.IsNullOrEmpty(selectorOutputString))
                {
                    selectorOutput = HexUtilities.HexToBytes(selectorOutputString);
                }

                StrongFingerprint strongFingerprint = new StrongFingerprint(
                    weakFingerprint,
                    new Selector(selectorHash, selectorOutput));

                // TryAdd succeeds only on first sight, which provides the de-duplication.
                if (uniqueStrongFingerprints.TryAdd(strongFingerprint, 0))
                {
                    yield return (strongFingerprint);
                }
            }
        }

        currentLine = reader.ReadLine();
    }
}
/// <summary>
/// Validates the download settings (including the optional downloaded/extracted value names)
/// and, when valid, packages them into a <see cref="DownloadData"/> instance. Logs a targeted
/// error and returns false otherwise.
/// </summary>
private bool ValidateAndExtractDownloadData(
    FrontEndContext context,
    IDownloadFileSettings downloadSettings,
    Dictionary<string, DownloadData> downloads,
    AbsolutePath resolverFolder,
    out DownloadData downloadData)
{
    downloadData = null;

    if (string.IsNullOrEmpty(downloadSettings.ModuleName))
    {
        Logger.Log.DownloadFrontendMissingModuleId(m_context.LoggingContext, downloadSettings.Url);
        return false;
    }

    if (downloads.ContainsKey(downloadSettings.ModuleName))
    {
        Logger.Log.DownloadFrontendDuplicateModuleId(m_context.LoggingContext, downloadSettings.ModuleName, Kind, Name);
        return false;
    }

    if (string.IsNullOrEmpty(downloadSettings.Url))
    {
        Logger.Log.DownloadFrontendMissingUrl(m_context.LoggingContext, downloadSettings.ModuleName);
        return false;
    }

    if (!Uri.TryCreate(downloadSettings.Url, UriKind.Absolute, out var downloadLocation))
    {
        Logger.Log.DownloadFrontendInvalidUrl(m_context.LoggingContext, downloadSettings.ModuleName, downloadSettings.Url);
        return false;
    }

    // The hash is optional; when present it must be a well-formed content hash.
    ContentHash? contentHash = null;
    if (!string.IsNullOrEmpty(downloadSettings.Hash))
    {
        if (!ContentHash.TryParse(downloadSettings.Hash, out var hash))
        {
            Logger.Log.DownloadFrontendHashValueNotValidContentHash(m_context.LoggingContext, downloadSettings.ModuleName, downloadSettings.Url, downloadSettings.Hash);
            return false;
        }

        contentHash = hash;
    }

    // For these last two validations, the TS parser would also complain, but we just give a
    // more targeted error before that happens.
    if (!string.IsNullOrEmpty(downloadSettings.DownloadedValueName) &&
        !SymbolAtom.TryCreate(context.StringTable, downloadSettings.DownloadedValueName, out _))
    {
        Logger.Log.NameContainsInvalidCharacters(m_context.LoggingContext, "downloadedValueName", downloadSettings.DownloadedValueName);
        return false;
    }

    if (!string.IsNullOrEmpty(downloadSettings.ExtractedValueName) &&
        !SymbolAtom.TryCreate(context.StringTable, downloadSettings.ExtractedValueName, out _))
    {
        Logger.Log.NameContainsInvalidCharacters(m_context.LoggingContext, "extractedValueName", downloadSettings.ExtractedValueName);
        return false;
    }

    downloadData = new DownloadData(
        context,
        downloadSettings,
        downloadLocation,
        resolverFolder,
        contentHash,
        downloadSettings.DownloadedValueName,
        downloadSettings.ExtractedValueName);
    return true;
}
/// <summary>
/// Verifies that a malformed hash string is rejected for the given <paramref name="hashType"/>.
/// </summary>
public void TryParseWithTypeFail(HashType hashType, string value)
{
    var parsed = ContentHash.TryParse(hashType, value, out _);
    Assert.False(parsed);
}
/// <nodoc />
public static async Task<ExtractIncrementalState> TryLoadAsync(Logger logger, FrontEndContext context, DownloadData downloadData)
{
    // Manifest layout: version line, url line, archive-type line, file-count line, then
    // alternating relative-path / content-hash line pairs, one pair per file.
    var manifestFilePath = downloadData.ExtractManifestFile.ToString(context.PathTable);

    ExtractIncrementalState result = null;
    if (!FileUtilities.Exists(manifestFilePath))
    {
        // No manifest on disk means no incremental state is available.
        return (null);
    }

    using (var reader = new StreamReader(manifestFilePath))
    {
        // Any mismatch in the header lines invalidates the manifest (logged with the failing field).
        var versionLine = await reader.ReadLineAsync();
        if (versionLine == null || !string.Equals(versionLine, ManifestVersion, StringComparison.Ordinal))
        {
            logger.ExtractManifestDoesNotMatch(context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, "version", ManifestVersion, versionLine);
            return (null);
        }

        var urlLine = await reader.ReadLineAsync();
        if (!string.Equals(urlLine, downloadData.Settings.Url, StringComparison.Ordinal))
        {
            logger.ExtractManifestDoesNotMatch(context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, "url", downloadData.Settings.Url, urlLine);
            return (null);
        }

        var archiveTypeLine = await reader.ReadLineAsync();
        if (archiveTypeLine == null || !Enum.TryParse<DownloadArchiveType>(archiveTypeLine, out var archiveType) || archiveType != downloadData.Settings.ArchiveType)
        {
            logger.ExtractManifestDoesNotMatch(context.LoggingContext, downloadData.Settings.ModuleName, downloadData.Settings.Url, "archiveType", downloadData.Settings.ArchiveType.ToString(), archiveTypeLine);
            return (null);
        }

        var fileCountLine = await reader.ReadLineAsync();
        if (fileCountLine == null || !uint.TryParse(fileCountLine, out var fileCount))
        {
            // Malformed file count: treated as no usable manifest (no targeted log message here).
            return (null);
        }

        var hashes = new Dictionary<AbsolutePath, ContentHash>();
        var files = new FileArtifact[fileCount];
        for (int i = 0; i < fileCount; i++)
        {
            // Each file entry is two lines: a relative path, then the expected content hash.
            var filePathLine = await reader.ReadLineAsync();
            if (filePathLine == null || !RelativePath.TryCreate(context.StringTable, filePathLine, out var relativeFilePath))
            {
                return (null);
            }

            var hashLine = await reader.ReadLineAsync();
            if (hashLine == null || !ContentHash.TryParse(hashLine, out var contentHash))
            {
                return (null);
            }

            var filePath = downloadData.ContentsFolder.Path.Combine(context.PathTable, relativeFilePath);
            files[i] = FileArtifact.CreateSourceFile(filePath);
            hashes[filePath] = contentHash;
        }

        // Files are sorted to produce a deterministic, comparer-consistent array for consumers.
        var sortedFiles = SortedReadOnlyArray<FileArtifact, OrdinalFileArtifactComparer>.SortUnsafe(
            files,
            OrdinalFileArtifactComparer.Instance);

        result = new ExtractIncrementalState(downloadData, sortedFiles, hashes);
    }

    return (result);
}
/// <summary>
/// Verifies that a well-formed hash string parses successfully when no hash type is specified.
/// </summary>
public void TryParseWithoutTypeSuccess(string value)
{
    var parsed = ContentHash.TryParse(value, out _);
    Assert.True(parsed);
}