/// <summary>
/// Verifies the push-file rejection paths: when many concurrent pushes target the same
/// content hash, at least one must be rejected with <c>Rejected_OngoingCopy</c>; once the
/// content is present locally, a further push must be rejected with
/// <c>Rejected_ContentAvailableLocally</c>.
/// </summary>
public async Task PushIsRejectedForTheSameHash()
{
    await Task.Yield();
    int numberOfPushes = 100;
    await RunTestCase(async (rootPath, session, client) =>
    {
        var bytes = ThreadSafeRandom.GetBytes(1 + 42);
        var hash = ContentHashers.Get(HashType.Vso0).GetContentHash(bytes);

        // Every concurrent push gets its own stream over the same content/hash.
        var input = Enumerable.Range(1, numberOfPushes)
            .Select(_ => (stream: new MemoryStream(bytes), hash: hash))
            .ToList();

        var pushTasks = input.Select(
            tpl => client.PushFileAsync(
                new OperationContext(_context),
                tpl.hash,
                tpl.stream,
                new CopyOptions(bandwidthConfiguration: null))).ToList();

        var results = await Task.WhenAll(pushTasks);
        results.Any(r => r.Status == CopyResultCode.Rejected_OngoingCopy).Should().BeTrue();

        // Use a fresh stream for the follow-up push. The original code re-used
        // input[0].stream, which had already been consumed (positioned at EOF) by the
        // concurrent pushes above, so the server could have seen an empty stream.
        var result = await client.PushFileAsync(
            new OperationContext(_context),
            hash,
            new MemoryStream(bytes),
            new CopyOptions(bandwidthConfiguration: null));
        result.Status.Should().Be(CopyResultCode.Rejected_ContentAvailableLocally);
    });
}
/// <summary>
/// Hashes the file at <paramref name="path"/> with the dedup hasher for
/// <paramref name="hashType"/> and returns the resulting <see cref="DedupNode"/>.
/// </summary>
internal static async Task<DedupNode> GetDedupNodeFromFileAsync(HashType hashType, string path)
{
    var hasher = (DedupContentHasher<DedupNodeOrChunkHashAlgorithm>)ContentHashers.Get(hashType);

    // Open for shared async reads; allow concurrent deletion so eviction does not block us.
    using var stream = FileStreamUtility.OpenFileStreamForAsync(
        path, FileMode.Open, FileAccess.Read, FileShare.Read | FileShare.Delete);

    return await hasher.HashContentAndGetDedupNodeAsync(stream);
}
/// <summary>
/// Pushes the file at <paramref name="sourcePath"/> to the CAS service on
/// <paramref name="host"/> via gRPC, retrying transient client failures.
/// </summary>
/// <param name="host">Machine to copy to.</param>
/// <param name="sourcePath">Path to the source file.</param>
/// <param name="grpcPortFileName">File name where the gRPC port can be found when using the cache service; defaults to 'CASaaS GRPC port'.</param>
/// <param name="grpcPort">The gRPC port; 0 means "read it from <paramref name="grpcPortFileName"/>".</param>
/// <exception cref="CacheException">Thrown when the push fails or any other error occurs.</exception>
internal void CopyFileTo(
    [Required, Description("Machine to copy to")] string host,
    [Required, Description("Path to source file")] string sourcePath,
    [Description("File name where the GRPC port can be found when using cache service. 'CASaaS GRPC port' if not specified")] string grpcPortFileName,
    [Description("The GRPC port"), DefaultValue(0)] int grpcPort)
{
    Initialize();

    var context = new Context(_logger);
    var operationContext = new OperationContext(context, CancellationToken.None);

    // Linear retry: only ClientCanRetryException is considered transient.
    var retryPolicy = RetryPolicyFactory.GetLinearPolicy(ex => ex is ClientCanRetryException, (int)_retryCount, TimeSpan.FromSeconds(_retryIntervalSeconds));

    if (grpcPort == 0)
    {
        grpcPort = Helpers.GetGrpcPortFromFile(_logger, grpcPortFileName);
    }

    // NOTE(review): hash is computed over a full in-memory read of the file; presumably the
    // server only needs it as an identifier — large files are read twice (hash + stream).
    var hasher = ContentHashers.Get(HashType.MD5);
    var bytes = File.ReadAllBytes(sourcePath);
    var hash = hasher.GetContentHash(bytes);

    try
    {
        var path = new AbsolutePath(sourcePath);
        using Stream stream = File.OpenRead(path.Path);

        // No compression and no bandwidth checking for this manual copy tool path.
        var config = GrpcCopyClientConfiguration.WithGzipCompression(false);
        config.BandwidthCheckerConfiguration = BandwidthChecker.Configuration.Disabled;
        using var clientCache = new GrpcCopyClientCache(context, new GrpcCopyClientCacheConfiguration()
        {
            GrpcCopyClientConfiguration = config
        });

        // Blocking on purpose: this is a synchronous CLI entry point, so we wait for the
        // async push to finish before returning.
        var copyFileResult = clientCache.UseAsync(operationContext, host, grpcPort, (nestedContext, rpcClient) =>
        {
            return (retryPolicy.ExecuteAsync(
                () => rpcClient.PushFileAsync(nestedContext, hash, stream, new CopyOptions(bandwidthConfiguration: null)),
                _cancellationToken));
        }).GetAwaiter().GetResult();

        if (!copyFileResult.Succeeded)
        {
            _tracer.Error(context, $"{copyFileResult}");
            throw new CacheException(copyFileResult.ErrorMessage);
        }
        else
        {
            _tracer.Info(context, $"Copy of {sourcePath} was successful");
        }
    }
    catch (Exception ex)
    {
        // Surface every failure to the caller as a CacheException.
        throw new CacheException(ex.ToString());
    }
}
/// <summary>
/// Re-hashes every blob in the local CAS and checks that (a) each content file lives in a
/// parent directory named by the hash prefix and (b) the file's contents hash to the value
/// encoded in its name. Returns true only when no mismatches of either kind were found.
/// </summary>
/// <param name="context">Tracing context.</param>
/// <returns>True when all names match parent directories and all contents match their hashes.</returns>
private async Task<bool> ValidateNameHashesMatchContentHashesAsync(Context context)
{
    // These counters are incremented concurrently from the per-blob tasks below, so they
    // must use Interlocked. (The original bare ++ was a data race that could silently lose
    // increments and report a false-clean validation.)
    int mismatchedParentDirectoryCount = 0;
    int mismatchedContentHashCount = 0;
    _tracer.Always(context, "Validating local CAS content hashes...");
    await TaskSafetyHelpers.WhenAll(_enumerateBlobPathsFromDisk().Select(
        async blobPath =>
        {
            var contentFile = blobPath.FullPath;

            // Check 1: file name must start with its parent directory's name (the hash-prefix layout).
            if (!contentFile.FileName.StartsWith(contentFile.GetParent().FileName, StringComparison.OrdinalIgnoreCase))
            {
                System.Threading.Interlocked.Increment(ref mismatchedParentDirectoryCount);

                _tracer.Debug(
                    context,
                    $"The first {FileSystemContentStoreInternal.HashDirectoryNameLength} characters of the name of content file at {contentFile}" +
                    $" do not match the name of its parent directory {contentFile.GetParent().FileName}.");
            }

            if (!FileSystemContentStoreInternal.TryGetHashFromPath(contentFile, out var hashFromPath))
            {
                _tracer.Debug(
                    context,
                    $"The path '{contentFile}' does not contain a well-known hash name.");
                return;
            }

            // Check 2: re-hash the file contents and compare against the hash embedded in the path.
            var hasher = ContentHashers.Get(hashFromPath.HashType);
            ContentHash hashFromContents;

            using (var contentStream = await _fileSystem.OpenSafeAsync(
                contentFile, FileAccess.Read, FileMode.Open, FileShare.Read | FileShare.Delete, FileOptions.SequentialScan, HashingExtensions.HashStreamBufferSize))
            {
                hashFromContents = await hasher.GetContentHashAsync(contentStream);
            }

            if (hashFromContents != hashFromPath)
            {
                System.Threading.Interlocked.Increment(ref mismatchedContentHashCount);

                _tracer.Debug(
                    context,
                    $"Content at {contentFile} content hash {hashFromContents.ToShortString()} did not match expected value of {hashFromPath.ToShortString()}.");
            }
        }));

    _tracer.Always(context, $"{mismatchedParentDirectoryCount} mismatches between content file name and parent directory.");
    _tracer.Always(context, $"{mismatchedContentHashCount} mismatches between content file name and file contents.");

    return mismatchedContentHashCount == 0 && mismatchedParentDirectoryCount == 0;
}
/// <summary>
/// Pushes the file at <paramref name="sourcePath"/> to the CAS service on
/// <paramref name="host"/> via gRPC, retrying at a fixed interval on transient errors.
/// </summary>
/// <param name="host">Machine to copy to.</param>
/// <param name="sourcePath">Path to the source file.</param>
/// <param name="grpcPortFileName">File name where the gRPC port can be found when using the cache service; defaults to 'CASaaS GRPC port'.</param>
/// <param name="grpcPort">The gRPC port; 0 means "read it from <paramref name="grpcPortFileName"/>".</param>
/// <exception cref="CacheException">Thrown when the push fails or any other error occurs.</exception>
internal void CopyFileTo(
    [Required, Description("Machine to copy to")] string host,
    [Required, Description("Path to source file")] string sourcePath,
    [Description("File name where the GRPC port can be found when using cache service. 'CASaaS GRPC port' if not specified")] string grpcPortFileName,
    [Description("The GRPC port"), DefaultValue(0)] int grpcPort)
{
    Initialize();

    var context = new Context(_logger);
    var operationContext = new OperationContext(context, CancellationToken.None);

    // Fixed-interval retry over the default transient-error detection strategy.
    var retryPolicy = new RetryPolicy(
        new TransientErrorDetectionStrategy(),
        new FixedInterval("RetryInterval", (int)_retryCount, TimeSpan.FromSeconds(_retryIntervalSeconds), false));

    if (grpcPort == 0)
    {
        grpcPort = Helpers.GetGrpcPortFromFile(_logger, grpcPortFileName);
    }

    // NOTE(review): the whole file is read into memory just to compute the MD5 identifier;
    // the content itself is then streamed separately from disk.
    var hasher = ContentHashers.Get(HashType.MD5);
    var bytes = File.ReadAllBytes(sourcePath);
    var hash = hasher.GetContentHash(bytes);

    try
    {
        // The client wrapper and the source stream must both stay alive until the push
        // completes; both are disposed at the end of the enclosing try via using declarations.
        using var clientCache = new GrpcCopyClientCache(context);
        using var rpcClientWrapper = clientCache.CreateAsync(host, grpcPort, useCompression: false).GetAwaiter().GetResult();
        var rpcClient = rpcClientWrapper.Value;

        var path = new AbsolutePath(sourcePath);
        using Stream stream = File.OpenRead(path.Path);

        // This action is synchronous to make sure the calling application doesn't exit before the method returns.
        var copyFileResult = retryPolicy.ExecuteAsync(() => rpcClient.PushFileAsync(operationContext, hash, () => Task.FromResult(stream))).Result;
        if (!copyFileResult.Succeeded)
        {
            _logger.Error($"{copyFileResult}");
            throw new CacheException(copyFileResult.ErrorMessage);
        }
        else
        {
            _logger.Info($"Copy of {sourcePath} was successful");
        }
    }
    catch (Exception ex)
    {
        // Surface every failure to the caller as a CacheException.
        throw new CacheException(ex.ToString());
    }
}
/// <summary>
/// Loads a configuration object from preprocessed json.
/// </summary>
/// <param name="configurationPath">Path of the json configuration file to read.</param>
/// <param name="configHash">Receives the Murmur hash (hex) of the raw, pre-preprocessing json text.</param>
/// <param name="hostParameters">Host parameters driving preprocessing; read from the environment when null.</param>
/// <returns>The deserialized configuration object.</returns>
public static TConfig LoadPreprocessedConfig<TConfig>(string configurationPath, out string configHash, HostParameters hostParameters = null)
{
    if (hostParameters == null)
    {
        hostParameters = HostParameters.FromEnvironment();
    }

    string configJson = File.ReadAllText(configurationPath);

    // Hash the raw text (before preprocessing) so the hash identifies the on-disk file.
    configHash = ContentHashers.Get(HashType.Murmur).GetContentHash(Encoding.UTF8.GetBytes(configJson)).ToHex();

    string preprocessedConfigJson = DeploymentUtilities
        .GetHostJsonPreprocessor(hostParameters)
        .Preprocess(configJson);

    return JsonSerializer.Deserialize<TConfig>(preprocessedConfigJson, DeploymentUtilities.ConfigurationSerializationOptions);
}
/// <summary>
/// Registers <paramref name="content"/> (UTF-8 encoded) in the in-memory content map under a
/// hash-derived download url and returns the corresponding manifest file spec.
/// </summary>
public DeploymentManifest.FileSpec AddContent(string content)
{
    byte[] encoded = Encoding.UTF8.GetBytes(content);
    string hash = ContentHashers.Get(HashType.MD5).GetContentHash(encoded).ToString();

    // The download url doubles as the key into the content map.
    string downloadUrl = $"casaas://files?hash={hash}";
    Content[downloadUrl] = encoded;

    return new DeploymentManifest.FileSpec()
    {
        Size = encoded.Length,
        Hash = hash,
        DownloadUrl = downloadUrl
    };
}
/// <summary>
/// Pushes many distinct pieces of content concurrently. When the proactive-copy limit is 1,
/// at least one push must fail with a "CopyLimitReached" error; otherwise all must succeed.
/// </summary>
public async Task PushFileAsync(bool limitProactiveCopies)
{
    await Task.Yield();

    _proactivePushCountLimit = limitProactiveCopies ? 1 : 10000;
    int numberOfPushes = 100;

    await RunTestCase(async (rootPath, session, client) =>
    {
        // Build distinct random payloads, each paired with its VSO0 hash.
        var input = Enumerable.Range(1, numberOfPushes)
            .Select(r => ThreadSafeRandom.GetBytes(1 + 42))
            .Select(data => (stream: new MemoryStream(data), hash: ContentHashers.Get(HashType.Vso0).GetContentHash(data)))
            .ToList();

        var pushTasks = input
            .Select(tpl => client.PushFileAsync(
                new OperationContext(_context),
                tpl.hash,
                tpl.stream,
                new CopyOptions(bandwidthConfiguration: null)))
            .ToList();

        var results = await Task.WhenAll(pushTasks);

        if (!limitProactiveCopies)
        {
            // All operation should succeed!
            results.All(r => r.ShouldBeSuccess().Succeeded).Should().BeTrue();
        }
        else
        {
            // We're doing 100 simultaneous copies, at least some of them should fail, because we're not willing to wait for the response.
            var failure = results.FirstOrDefault(r => !r.Succeeded);
            failure.Should().NotBeNull("At least one copy operation should fail.");
            failure!.ErrorMessage.Should().Contain("CopyLimitReached");
        }
    });
}
/// <summary>
/// Computes a hexadecimal content id (Murmur hash of the UTF-8 bytes) for the given string.
/// </summary>
public static string ComputeContentId(string value)
    => ContentHashers.Get(HashType.Murmur).GetContentHash(Encoding.UTF8.GetBytes(value)).ToHex();