        private async Task<BoolResult> ShutdownStoresAsync(Context context)
        {
            var tasks = new List<Task<BoolResult>>(StoresByName.Count);

            tasks.AddRange(StoresByName.Select(kvp => kvp.Value.ShutdownAsync(context)));
            await TaskSafetyHelpers.WhenAll(tasks);

            var result = BoolResult.Success;

            foreach (var task in tasks)
            {
                var r = await task;
                // Keep the first failure so the caller sees why shutdown failed.
                if (result.Succeeded && !r.Succeeded)
                {
                    result = r;
                }
            }

            return result;
        }
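Every example in this set coordinates its tasks through TaskSafetyHelpers.WhenAll rather than awaiting Task.WhenAll directly. The helper's implementation is not part of this excerpt, but the call sites (see Example #3, which catches an AggregateException carrying several inner exceptions) suggest a sketch along these lines; treat it as an assumption, not the actual source:

using System.Collections.Generic;
using System.Threading.Tasks;

public static class TaskSafetyHelpersSketch
{
    // Hypothetical: awaiting Task.WhenAll rethrows only the first exception,
    // so this variant rethrows the task's full AggregateException instead,
    // keeping every failure observable.
    public static async Task WhenAll(IEnumerable<Task> tasks)
    {
        Task whenAllTask = Task.WhenAll(tasks);
        try
        {
            await whenAllTask;
        }
        catch
        {
            // whenAllTask.Exception aggregates the exceptions of all faulted tasks.
            if (whenAllTask.Exception != null)
            {
                throw whenAllTask.Exception;
            }

            throw;
        }
    }
}

A Task&lt;TResult&gt; overload (as used by the result-consuming WhenAll calls in Example #5) would presumably follow the same pattern and return the result array.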
Example #2
        public Task OpenStreamParallelPinsToMultiplePinContexts()
        {
            var context = new Context(Logger);

            return TestStore(context, _clock, async store =>
            {
                ContentHash contentHash;
                using (var pinContext = store.CreatePinContext())
                {
                    // Pin some new content.
                    const int size = MaxSizeHard / 3;
                    var r = await store.PutRandomAsync(context, size, ContentHashType, new PinRequest(pinContext));
                    contentHash = r.ContentHash;
                    store.PinMapForTest.ContainsKey(contentHash).Should().BeTrue();
                    store.PinMapForTest[contentHash].Count.Should().Be(1);

                    // Open multiple streams to same content, each pinning to a separate pin context.
                    var streams = Enumerable.Repeat<Stream>(null, NumParallelTasks).ToList();
                    var contexts = Enumerable.Range(0, NumParallelTasks).Select(i => store.CreatePinContext()).ToList();
                    var tasks = Enumerable.Range(0, NumParallelTasks).Select(i => Task.Run(
                        async () => streams[i] = (await store.OpenStreamAsync(context, contentHash, new PinRequest(contexts[i]))).Stream));
                    await TaskSafetyHelpers.WhenAll(tasks);
                    store.PinMapForTest[contentHash].Count.Should().Be(NumParallelTasks + 1);

                    // Disposing the streams does not unpin the content.
                    streams.ForEach(stream => stream.Dispose());
                    store.PinMapForTest[contentHash].Count.Should().Be(NumParallelTasks + 1);

                    // Disposing the separate pin contexts releases their pins, but the original pin context still holds one.
                    contexts.ForEach(c => c.Dispose());
                    store.PinMapForTest[contentHash].Count.Should().Be(1);

                    await pinContext.DisposeAsync();
                }

                // After all pin contexts disposed, content is no longer pinned.
                store.PinMapForTest.TryGetValue(contentHash, out _).Should().BeFalse();
            });
        }
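The assertions above read store.PinMapForTest as a map from content hash to a pin whose Count tracks how many pin contexts currently hold that hash. The store's actual Pin type is not shown in this excerpt; a minimal reference-counted sketch that would satisfy the counts the test asserts (1, then NumParallelTasks + 1, then back to 1, then removal) might look like this:

using System.Threading;

// Hypothetical Pin: each PinContext that pins the hash increments the count,
// each disposed context decrements it, and the store removes the map entry
// once the count reaches zero.
public class Pin
{
    private int _count;

    public int Count => Volatile.Read(ref _count);

    public void Increment() => Interlocked.Increment(ref _count);

    // Returns true when the last reference was released and the entry can be removed.
    public bool Decrement() => Interlocked.Decrement(ref _count) == 0;
}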
Example #3
        private Task RunManyForStoreAcrossServerRestartAsync(Func<Context, IContentStore, Task> requestFunc)
        {
            return RunStoreTestAsync(async (context, store) =>
            {
                // Launch a bunch of duplicate requests in the background, with a server restart mixed in.
                var tasks = new List<Task>(101);
                tasks.AddRange(Enumerable.Range(0, 50).Select(i => Task.Run(() => requestFunc(new Context(Logger), store))));
                tasks.Add(((ITestServiceClientContentStore)store).RestartServerAsync(context));
                tasks.AddRange(Enumerable.Range(0, 50).Select(i => Task.Run(() => requestFunc(new Context(Logger), store))));

                try
                {
                    await TaskSafetyHelpers.WhenAll(tasks);
                }
                catch (AggregateException ex)
                {
                    AggregateException singleException = ex.Flatten();
                    string failureMessage = string.Join(",", singleException.InnerExceptions.Select(x => x.Message));
                    Assert.True(false, failureMessage);
                }
            });
        }
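The AggregateException.Flatten call matters because failures surfaced by WhenAll can themselves be AggregateExceptions, producing a nested tree; Flatten hoists every leaf exception into one InnerExceptions list before the messages are joined. A standalone illustration:

using System;
using System.Linq;

var nested = new AggregateException(
    new InvalidOperationException("first failure"),
    new AggregateException(new TimeoutException("second failure")));

// Flatten hoists the nested aggregate's leaf into the top-level list,
// so both messages survive a string.Join like the one above.
AggregateException flat = nested.Flatten();
Console.WriteLine(string.Join(",", flat.InnerExceptions.Select(x => x.Message)));
// Output: first failure,second failure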
Example #4
        public void DedupHashFile
        (
            [Required] string[] path,
            [DefaultValue(false)] bool chunks,
            [DefaultValue(false)] bool childNodes,
            [DefaultValue(false)] bool rollingHash,
            [DefaultValue(FileSystemConstants.FileIOBufferSize)] int bufferSize,
            [DefaultValue((long)0)] long startOffset
        )
        {
            Initialize();

            _displayChunks     = chunks;
            _displayChildNodes = childNodes;

            var paths = new List<AbsolutePath>();

            foreach (AbsolutePath root in path.Select(p => new AbsolutePath(p)))
            {
                if (_fileSystem.DirectoryExists(root))
                {
                    paths.AddRange(_fileSystem.EnumerateFiles(root, EnumerateOptions.Recurse).Select(fileInfo => fileInfo.FullPath));
                }
                else if (_fileSystem.FileExists(root))
                {
                    paths.Add(root);
                }
                else
                {
                    throw new ArgumentException("given path is not an existing file or directory");
                }
            }

            var buffer = new byte[bufferSize];

            using (var hasher = new DedupNodeHashAlgorithm(rollingHash ? DedupNodeTree.Algorithm.RollingHash : DedupNodeTree.Algorithm.MaximallyPacked))
            {
                foreach (var p in paths)
                {
                    hasher.Initialize();
                    TaskSafetyHelpers.SyncResultOnThreadPool(async () =>
                    {
                        using (var fs = await _fileSystem.OpenReadOnlySafeAsync(p, FileShare.Read | FileShare.Delete))
                        {
                            fs.Position = startOffset;
                            int bytesRead;
                            while ((bytesRead = await fs.ReadAsync(buffer, 0, buffer.Length)) > 0)
                            {
                                hasher.TransformBlock(buffer, 0, bytesRead, null, 0);
                            }
                            hasher.TransformFinalBlock(new byte[0], 0, 0);
                            DedupNode root = hasher.GetNode();
                            ulong offset   = 0;
                            LogNode(true, string.Empty, root, p, ref offset);
                        }

                        return 0;
                    });
                }
            }

            _logger.Always("Totals:");
            _logger.Always($"Bytes: Unique={_uniqueBytes:N0} Total={_totalBytes:N0}");
            _logger.Always($"Chunks: Unique={_allChunks.Count:N0} Total={_totalChunks:N0}");
            _logger.Always($"Nodes: Unique={_allNodes.Count:N0} Total={_totalNodes:N0}");
        }
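TaskSafetyHelpers.SyncResultOnThreadPool lets this synchronous command verb drive an async body. Its implementation is not shown here; a plausible sketch, assuming its purpose is to block for a result without deadlocking when the caller has a SynchronizationContext, is:

using System;
using System.Threading.Tasks;

// Hypothetical: queue the async function to the thread pool, where no
// SynchronizationContext can capture continuations, then block for the result.
public static T SyncResultOnThreadPool<T>(Func<Task<T>> taskFunc)
{
    return Task.Run(taskFunc).GetAwaiter().GetResult();
}

This would also explain the `return 0;` at the end of the lambda: the helper appears to require a result-returning function even when the caller discards the value.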
Example #5
        protected async Task RunTestAsync(
            Context context,
            int storeCount,
            Func<TestContext, Task> testFunc,
            ImplicitPin implicitPin           = ImplicitPin.PutAndGet,
            bool enableDistributedEviction    = false,
            int? replicaCreditInMinutes       = null,
            bool enableRepairHandling         = false,
            bool emptyFileHashShortcutEnabled = false,
            int iterations                    = 1)
        {
            var indexedDirectories = Enumerable.Range(0, storeCount)
                                     .Select(i => new { Index = i, Directory = new DisposableDirectory(FileSystem, TestRootDirectoryPath / i.ToString()) })
                                     .ToList();
            var testFileCopier = new TestFileCopier();

            for (int iteration = 0; iteration < iterations; iteration++)
            {
                var stores = indexedDirectories.Select(
                    directory =>
                    CreateStore(
                        context,
                        testFileCopier,
                        directory.Directory,
                        directory.Index,
                        enableDistributedEviction,
                        replicaCreditInMinutes,
                        enableRepairHandling,
                        emptyFileHashShortcutEnabled)).ToList();

                var startupResults = await TaskSafetyHelpers.WhenAll(stores.Select(store => store.StartupAsync(context)));

                Assert.True(startupResults.All(x => x.Succeeded), $"Failed to startup: {string.Join(Environment.NewLine, startupResults.Where(s => !s))}");

                var id       = 0;
                var sessions = stores.Select(store => store.CreateSession(context, "store" + id++, implicitPin).Session).ToList();
                await TaskSafetyHelpers.WhenAll(sessions.Select(session => session.StartupAsync(context)));

                var testContext = new TestContext(context, testFileCopier, indexedDirectories.Select(p => p.Directory).ToList(), sessions, stores, iteration);
                await testFunc(testContext);

                await TaskSafetyHelpers.WhenAll(
                    sessions.Select(async session =>
                {
                    if (!session.ShutdownCompleted)
                    {
                        await session.ShutdownAsync(context).ThrowIfFailure();
                    }
                }));

                sessions.ForEach(session => session.Dispose());

                await TaskSafetyHelpers.WhenAll(Enumerable.Range(0, storeCount).Select(storeId => LogStats(testContext, storeId)));

                await TaskSafetyHelpers.WhenAll(stores.Select(store => store.ShutdownAsync(context)));

                stores.ForEach(store => store.Dispose());
            }

            indexedDirectories.ForEach(directory => directory.Directory.Dispose());
        }
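One detail worth noting: the startup check above filters with startupResults.Where(s => !s), which only compiles if BoolResult converts to bool. That conversion is not part of this excerpt; presumably BoolResult (or its base type) declares something like:

// Assumed member of BoolResult: in a boolean context a result evaluates to
// its Succeeded flag, so `!s` selects the failed results.
public static implicit operator bool(BoolResult result) => result.Succeeded;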
Example #6
        private async Task<ContentMap> DeserializeBodyAsync(Context context, MemoryContentDirectoryHeader header, AbsolutePath path, bool isLoadingBackup)
        {
            var contentDirectory = new ContentMap();

            try
            {
                var sw = Stopwatch.StartNew();
                using (var stream = await _fileSystem.OpenSafeAsync(path, FileAccess.Read, FileMode.Open, FileShare.Read))
                {
                    // Skip past the header bytes; the caller has already parsed them.
                    byte[] headerBuffer = new byte[header.HeaderSize];
                    stream.Read(headerBuffer, 0, header.HeaderSize);

                    var streamSync       = new object();
                    var entriesSync      = new object();
                    var entries          = new List<KeyValuePair<ContentHash, ContentFileInfo>>(header.EntryCount);
                    var entriesRemaining = header.EntryCount;
                    var tasks            = new List<Task>();
                    var nowFileTimeUtc   = DateTime.UtcNow.ToFileTimeUtc();

                    long totalSize                  = 0;
                    long totalUniqueSize            = 0;
                    long oldestContentAccessTimeUtc = nowFileTimeUtc;
                    long totalReplicaCount          = 0;
                    var  statsLock                  = new object();

                    Action<int> readChunk = count =>
                    {
                        var bufferLength = count * BinaryEntrySize;
                        var buffer       = new byte[bufferLength];
                        int bytesRead;

                        lock (streamSync)
                        {
                            bytesRead = stream.Read(buffer, 0, bufferLength);
                        }

                        if (bytesRead != buffer.Length)
                        {
                            // Short read: bail out and let the entry-count check below surface it as a CacheException.
                            return;
                        }

                        var serializeContext = new BufferSerializeContext(buffer);
                        var partitionEntries = new List<KeyValuePair<ContentHash, ContentFileInfo>>(count);

                        for (var i = 0; i < count; i++)
                        {
                            var contentHash             = serializeContext.DeserializeFullContentHash();
                            var fileSize                = serializeContext.DeserializeInt64();
                            var lastAccessedFileTimeUtc = serializeContext.DeserializeInt64();

                            // Guard against corruption of serialized timestamps which affect LRU. If we get something out of range,
                            // force it to now.
                            if (lastAccessedFileTimeUtc < 0 || lastAccessedFileTimeUtc > nowFileTimeUtc)
                            {
                                lastAccessedFileTimeUtc = nowFileTimeUtc;
                            }

                            // ReSharper disable once UnusedVariable
                            var accessCount  = serializeContext.DeserializeInt32();
                            var replicaCount = serializeContext.DeserializeInt32();

                            var contentFileInfo = new ContentFileInfo(fileSize, lastAccessedFileTimeUtc, replicaCount);
                            Interlocked.Add(ref totalSize, fileSize * replicaCount);
                            Interlocked.Add(ref totalUniqueSize, fileSize);
                            Interlocked.Add(ref totalReplicaCount, replicaCount);

                            if (oldestContentAccessTimeUtc > lastAccessedFileTimeUtc)
                            {
                                lock (statsLock)
                                {
                                    if (oldestContentAccessTimeUtc > lastAccessedFileTimeUtc)
                                    {
                                        oldestContentAccessTimeUtc = lastAccessedFileTimeUtc;
                                    }
                                }
                            }

                            partitionEntries.Add(new KeyValuePair<ContentHash, ContentFileInfo>(contentHash, contentFileInfo));
                        }

                        lock (entriesSync)
                        {
                            entries.AddRange(partitionEntries);
                        }
                    };

                    while (entriesRemaining > 0)
                    {
                        var chunkCount = Math.Min(entriesRemaining, BinaryMaxEntriesPerChunk);
                        tasks.Add(Task.Run(() => readChunk(chunkCount)));
                        entriesRemaining -= chunkCount;
                    }

                    await TaskSafetyHelpers.WhenAll(tasks);

                    context.Debug($"{Name}: Loaded content directory with {entries.Count} entries by {sw.ElapsedMilliseconds}ms: TotalContentSize={totalSize}, TotalUniqueSize={totalUniqueSize}, TotalReplicaCount={totalReplicaCount}, OldestContentTime={DateTime.FromFileTimeUtc(oldestContentAccessTimeUtc)}.");

                    if (entries.Count == header.EntryCount)
                    {
                        contentDirectory = new ContentMap(entries);
                    }
                    else
                    {
                        throw new CacheException($"Failed to read expected number of entries. Entries.Count={entries.Count}, Header.EntryCount={header.EntryCount}.");
                    }
                }

                if (!isLoadingBackup)
                {
                    // At this point, we've either successfully read the file or tried and failed. Either way, the existing file should now be
                    // deleted.  On a clean shutdown, it will be regenerated. On a dirty shutdown, we want it to already be gone otherwise
                    // we'll read in a file that is out-of-date.
                    _fileSystem.MoveFile(path, _backupFilePath, true);
                }
            }
            catch (Exception exception)
            {
                context.Warning($"{Name} failed to deserialize {FilePath} - starting with empty directory: {exception}");
                contentDirectory.Clear();
            }

            return contentDirectory;
        }
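The deserialization order above implies a fixed on-disk record shape, which is presumably what the BinaryEntrySize constant (not shown) encodes:

// Inferred per-entry layout; field widths follow the Deserialize* calls,
// while the serialized ContentHash length is an assumption of this sketch:
//   ContentHash  full content hash          (hash-type tag + digest bytes)
//   long         fileSize                   (8 bytes)
//   long         lastAccessedFileTimeUtc    (8 bytes)
//   int          accessCount                (4 bytes, read and discarded)
//   int          replicaCount               (4 bytes)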
Example #7
        private async Task SerializeAsync()
        {
            if (ContentDirectory == null || ContentDirectory.Count == 0)
            {
                return;
            }

            var openTask         = _fileSystem.OpenSafeAsync(FilePath, FileAccess.Write, FileMode.Create, FileShare.Delete);
            var sync             = new object();
            var writeHeader      = true;
            var entries          = ContentDirectory.ToArray();
            var entriesRemaining = entries.Length;
            var startIndex       = 0;
            var tasks            = new List<Task>();

            GetSizeAndReplicaCount(out var contentSize, out var replicaCount);

            using (var stream = await openTask)
            {
                Action<int, int> writeChunk = (index, count) =>
                {
                    var endIndexExclusive = index + count;
                    var bufferLength      = count * BinaryEntrySize;
                    var partitionBuffer   = new byte[bufferLength];
                    var partitionContext  = new BufferSerializeContext(partitionBuffer);

                    for (var i = index; i < endIndexExclusive; i++)
                    {
                        ContentFileInfo entry = entries[i].Value;
                        partitionContext.SerializeFull(entries[i].Key);
                        partitionContext.Serialize(entry.FileSize);
                        partitionContext.Serialize(entry.LastAccessedFileTimeUtc);
                        partitionContext.Serialize(UnusedAccessCount);
                        partitionContext.Serialize(entry.ReplicaCount);
                    }

                    lock (sync)
                    {
                        if (writeHeader)
                        {
                            writeHeader = false;
                            var headerBuffer  = new byte[22];
                            var headerContext = new BufferSerializeContext(headerBuffer);
                            headerContext.Serialize(BinaryFormatMagicFlag);
                            headerContext.Serialize(BinaryFormatVersion);
                            headerContext.Serialize(entries.Length);
                            headerContext.Serialize(contentSize);
                            headerContext.Serialize(replicaCount);
                            stream.Write(headerBuffer, 0, headerBuffer.Length);
                        }

                        stream.Write(partitionBuffer, 0, partitionContext.Offset);
                    }
                };

                while (startIndex < entries.Length)
                {
                    var i          = startIndex; // copy for the closure so each chunk task sees its own start index
                    var chunkCount = Math.Min(entriesRemaining, BinaryMaxEntriesPerChunk);
                    tasks.Add(Task.Run(() => writeChunk(i, chunkCount)));
                    startIndex       += chunkCount;
                    entriesRemaining -= chunkCount;
                }

                await TaskSafetyHelpers.WhenAll(tasks);

                Contract.Assert(startIndex == entries.Length);
            }
        }
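The 22-byte header buffer pins down the remaining field widths: the entry count is a 4-byte int and the two totals are 8-byte longs, which leaves one byte each for the magic flag and the version (an inference from the buffer size, not confirmed by this excerpt):

// Inferred header layout, 22 bytes total:
//   byte BinaryFormatMagicFlag   (1 byte, assumed)
//   byte BinaryFormatVersion     (1 byte, assumed)
//   int  entryCount              (4 bytes)
//   long contentSize             (8 bytes)
//   long replicaCount            (8 bytes)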
Example #8
        public void DedupHashFile
        (
            [Required] string[] path,
            [Required] string hashType,
            [DefaultValue(false)] bool chunks,
            [DefaultValue(false)] bool childNodes,
            [DefaultValue(FileSystemConstants.FileIOBufferSize)] int bufferSize,
            [DefaultValue((long)0)] long startOffset
        )
        {
            Initialize();

            _displayChunks     = chunks;
            _displayChildNodes = childNodes;

            if (!Enum.TryParse(hashType, out HashType dedupHashType))
            {
                throw new ArgumentException($"HashType couldn't be inferred - {hashType}. Valid HashType is required.");
            }

            var paths = new List<AbsolutePath>();

            foreach (AbsolutePath root in path.Select(p => new AbsolutePath(Path.GetFullPath(p))))
            {
                if (_fileSystem.DirectoryExists(root))
                {
                    paths.AddRange(_fileSystem.EnumerateFiles(root, EnumerateOptions.Recurse).Select(fileInfo => fileInfo.FullPath));
                }
                else if (_fileSystem.FileExists(root))
                {
                    paths.Add(root);
                }
                else
                {
                    throw new ArgumentException("given path is not an existing file or directory");
                }
            }

            var buffer = new byte[bufferSize];

            using (var contentHasher = new DedupNodeOrChunkHashAlgorithm(new ManagedChunker(dedupHashType.GetChunkerConfiguration())))
            {
                foreach (var p in paths)
                {
                    contentHasher.Initialize();
                    TaskSafetyHelpers.SyncResultOnThreadPool(async () =>
                    {
                        using (Stream fs = _fileSystem.OpenReadOnly(p, FileShare.Read | FileShare.Delete))
                        {
                            fs.Position = startOffset;
                            int bytesRead;
                            while ((bytesRead = await fs.ReadAsync(buffer, 0, buffer.Length)) > 0)
                            {
                                contentHasher.TransformBlock(buffer, 0, bytesRead, null, 0);
                            }
                            contentHasher.TransformFinalBlock(new byte[0], 0, 0);
                            DedupNode root = contentHasher.GetNode();
                            ulong offset   = 0;
                            LogNode(true, string.Empty, root, p, ref offset);
                        }

                        return 0;
                    });
                }
            }

            _logger.Always("Totals:");
            _logger.Always($"Bytes: Unique={_uniqueBytes:N0} Total={_totalBytes:N0}");
            _logger.Always($"Chunks: Unique={_allChunks.Count:N0} Total={_totalChunks:N0}");
            _logger.Always($"Nodes: Unique={_allNodes.Count:N0} Total={_totalNodes:N0}");
        }
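Note that the single-argument Enum.TryParse used above is case-sensitive, so the hashType argument must match a HashType member name's casing exactly. If a more forgiving command line were preferred, the standard ignoreCase overload would accept any casing:

// Case-insensitive variant of the parse at the top of this method.
if (!Enum.TryParse(hashType, ignoreCase: true, out HashType dedupHashType))
{
    throw new ArgumentException($"Unrecognized hash type '{hashType}'. A valid HashType value is required.");
}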