/// <summary>
/// Will indicate that the search results should return facet information for the given field.
/// </summary>
/// <param name="fieldName">The name of the document field to return facets for.</param>
public void AddFieldFacet(string fieldName)
{
    Verify.ArgumentNotNullOrEmpty(fieldName, nameof(fieldName));

    // Already registered - nothing to do.
    if (Facets.Any(facet => facet.Key == fieldName))
    {
        return;
    }

    // Resolve the field: default document fields first, then the custom
    // fields exposed by the registered document sources.
    var documentField = SearchDocumentBuilder.GetDefaultDocumentFields()
        .FirstOrDefault(f => f.Name == fieldName);

    if (documentField == null)
    {
        documentField = SearchFacade.DocumentSources
            .SelectMany(source => source.CustomFields)
            .FirstOrDefault(f => f.Name == fieldName);

        Verify.IsNotNull(documentField, $"Failed to find a document field by name '{fieldName}'");
    }

    Verify.IsNotNull(documentField.Facet, $"Faceted search is not enabled for the field '{fieldName}'");

    Facets.Add(new KeyValuePair<string, DocumentFieldFacet>(fieldName, documentField.Facet));
}
/// <summary>
/// Builds a console search document for the given data item in the specified culture.
/// Returns <c>null</c> when the item is not searchable (no label or no entity token).
/// </summary>
private SearchDocument FromData(IData data, CultureInfo culture)
{
    using (new DataScope(culture))
    {
        string label = data.GetLabel();

        // Having a label is a requirement for a data item to be searchable
        if (string.IsNullOrEmpty(label))
        {
            return null;
        }

        var builder = new SearchDocumentBuilder();
        builder.SetDataType(_interfaceType);

        string documentId = GetDocumentId(data);

        // Only published items (or types that are not publishable) get an internal url.
        bool buildUrl = InternalUrls.DataTypeSupported(_interfaceType)
            && (!_isPublishable || data.DataSourceId.PublicationScope == PublicationScope.Published);
        if (buildUrl)
        {
            builder.Url = InternalUrls.TryBuildInternalUrl(data.ToDataReference());
        }

        builder.CrawlData(data);

        var entityToken = GetConsoleEntityToken(data);
        if (entityToken == null)
        {
            Log.LogWarning(LogTitle, $"Failed to obtain an entity token for a data item of type '{data.DataSourceId.InterfaceType}'");
            return null;
        }

        return builder.BuildDocument(Name, documentId, label, null, entityToken);
    }
}
/// <summary>
/// Builds a console search document for the given page version, crawling the page itself,
/// its placeholder content and its meta data. When <paramref name="allMetaData"/> is
/// provided it is used as a pre-fetched lookup keyed by (page id, version id); otherwise
/// the meta data is loaded on demand.
/// </summary>
private SearchDocument FromPage(IPage page, EntityToken entityToken, Dictionary<Tuple<Guid, Guid>, List<IData>> allMetaData)
{
    string label = string.IsNullOrWhiteSpace(page.MenuTitle) ? page.Title : page.MenuTitle;

    bool isPublished = page.DataSourceId.PublicationScope == PublicationScope.Published;
    string documentId = GetDocumentId(page);

    var builder = new SearchDocumentBuilder(_docBuilderExtensions);
    builder.SetDataType(typeof(IPage));
    builder.CrawlData(page);

    using (new DataConnection(page.DataSourceId.PublicationScope, page.DataSourceId.LocaleScope))
    {
        // Only published pages get a public url on the document.
        if (isPublished)
        {
            builder.Url = PageUrls.BuildUrl(page, UrlKind.Internal);
        }

        foreach (var placeholder in PageManager.GetPlaceholderContent(page.Id, page.VersionId))
        {
            builder.CrawlData(placeholder, true);
        }

        List<IData> metaDataItems;
        if (allMetaData != null)
        {
            // Leaves metaDataItems null when the page has no entry in the lookup.
            allMetaData.TryGetValue(new Tuple<Guid, Guid>(page.Id, page.VersionId), out metaDataItems);
        }
        else
        {
            metaDataItems = GetMetaData(page.Id, page.VersionId, page.DataSourceId.PublicationScope, page.DataSourceId.LocaleScope);
        }

        try
        {
            metaDataItems?.ForEach(item => builder.CrawlData(item));
        }
        catch (Exception ex)
        {
            // Meta data crawling is best-effort; a failure must not block indexing the page.
            Log.LogWarning(LogTitle, ex);
        }
    }

    // Index the url title as well, unless it is merely the formatted page title.
    if (!string.IsNullOrEmpty(page.UrlTitle)
        && !UrlFormattersPluginFacade.FormatUrl(page.Title, true).Equals(page.UrlTitle, StringComparison.OrdinalIgnoreCase)
        && !UrlFormattersPluginFacade.FormatUrl(page.Title, false).Equals(page.UrlTitle, StringComparison.OrdinalIgnoreCase))
    {
        builder.TextParts.Add(page.UrlTitle);
    }

    return builder.BuildDocument(Name, documentId, label, null, entityToken);
}
/// <summary>
/// Assigns a media url to the search document when the data item is a media file
/// whose mime type is supported for frontend search.
/// </summary>
public virtual void Populate(SearchDocumentBuilder searchDocumentBuilder, IData data)
{
    if (!(data is IMediaFile mediaFile))
    {
        return;
    }

    if (string.IsNullOrWhiteSpace(mediaFile.MimeType) || !FrontendSearchSupported(mediaFile.MimeType))
    {
        return;
    }

    searchDocumentBuilder.Url = MediaUrls.BuildUrl(mediaFile);
}
public async Task OverridesDownloadCounts()
{
    // The comparer reports every package in the new data as changed.
    DownloadSetComparer
        .Setup(c => c.Compare(It.IsAny<DownloadData>(), It.IsAny<DownloadData>()))
        .Returns<DownloadData, DownloadData>((previous, current) =>
            new SortedDictionary<string, long>(
                current.ToDictionary(x => x.Key, x => x.Value.Total),
                StringComparer.OrdinalIgnoreCase));

    NewDownloadData.SetDownloadCount("A", "1.0.0", 12);
    NewDownloadData.SetDownloadCount("A", "2.0.0", 34);
    NewDownloadData.SetDownloadCount("B", "3.0.0", 5);
    NewDownloadData.SetDownloadCount("B", "4.0.0", 4);
    NewDownloadData.SetDownloadCount("C", "5.0.0", 2);
    NewDownloadData.SetDownloadCount("C", "6.0.0", 3);

    // Overrides win over actual totals; matching is case-insensitive ("b" covers "B").
    DownloadOverrides["A"] = 55;
    DownloadOverrides["b"] = 66;

    await Target.ExecuteAsync();

    // Documents should have new data with overriden downloads.
    SearchDocumentBuilder.Verify(
        b => b.UpdateDownloadCount("A", SearchFilters.IncludePrereleaseAndSemVer2, 55),
        Times.Once);
    SearchDocumentBuilder.Verify(
        b => b.UpdateDownloadCount("B", SearchFilters.IncludePrereleaseAndSemVer2, 66),
        Times.Once);
    SearchDocumentBuilder.Verify(
        b => b.UpdateDownloadCount("C", SearchFilters.IncludePrereleaseAndSemVer2, 5),
        Times.Once);

    // Downloads auxiliary file should have new data without overriden downloads.
    DownloadDataClient.Verify(
        c => c.ReplaceLatestIndexedAsync(
            It.Is<DownloadData>(d =>
                d["A"].Total == 46 &&
                d["A"]["1.0.0"] == 12 &&
                d["A"]["2.0.0"] == 34 &&
                d["B"].Total == 9 &&
                d["B"]["3.0.0"] == 5 &&
                d["B"]["4.0.0"] == 4 &&
                d["C"].Total == 5 &&
                d["C"]["5.0.0"] == 2 &&
                d["C"]["6.0.0"] == 3),
            It.IsAny<IAccessCondition>()),
        Times.Once);
}
/// <summary>
/// Registers a facet for one of the default document fields, unless it is already present.
/// The field is expected to exist among the default fields; otherwise Single() throws.
/// </summary>
internal void AddDefaultFieldFacet(string fieldName)
{
    bool alreadyRegistered = Facets.Any(facet => facet.Key == fieldName);
    if (alreadyRegistered)
    {
        return;
    }

    var facet = SearchDocumentBuilder.GetDefaultDocumentFields()
        .Single(field => field.Name == fieldName)
        .Facet;

    Facets.Add(new KeyValuePair<string, DocumentFieldFacet>(fieldName, facet));
}
/// <summary>
/// Extracts indexable text from a media file and, when any is found, adds it to the
/// document's text parts and assigns an internal media url.
/// </summary>
public void Populate(SearchDocumentBuilder searchDocumentBuilder, IData data)
{
    if (data is IMediaFile mediaFile)
    {
        var extractedText = GetTextToIndex(mediaFile);
        if (!string.IsNullOrWhiteSpace(extractedText))
        {
            searchDocumentBuilder.TextParts.Add(extractedText);
            searchDocumentBuilder.Url = MediaUrls.BuildUrl(mediaFile, UrlKind.Internal);
        }
    }
}
/// <summary>
/// Builds a search document for the given media file, using the title as the label
/// and falling back to the file name when the title is blank.
/// </summary>
private SearchDocument FromMediaFile(IMediaFile mediaFile)
{
    string label = string.IsNullOrWhiteSpace(mediaFile.Title)
        ? mediaFile.FileName
        : mediaFile.Title;

    var builder = new SearchDocumentBuilder();
    builder.SetDataType(typeof(IMediaFile));
    builder.CrawlData(mediaFile);

    return builder.BuildDocument(Name, mediaFile.Id.ToString(), label, null, mediaFile.GetDataEntityToken(), null);
}
/// <summary>
/// Will indicate that the search results should return facet information for the given field.
/// </summary>
/// <param name="fieldName">The name of the document field to return facets for.</param>
/// <param name="values">The array of values that are required to appear in the search documents.</param>
/// <param name="notValues">The array of values that are required not to appear in the search documents.</param>
public void AddFieldFacet(string fieldName, string[] values, string[] notValues)
{
    Verify.ArgumentNotNullOrEmpty(fieldName, nameof(fieldName));

    // Already registered - nothing to do.
    if (Facets.Any(facet => facet.Key == fieldName))
    {
        return;
    }

    // Resolve the field: default document fields first, then the custom
    // fields exposed by the registered document sources.
    var documentField = SearchDocumentBuilder.GetDefaultDocumentFields()
        .FirstOrDefault(f => f.Name == fieldName);

    if (documentField == null)
    {
        documentField = SearchFacade.DocumentSources
            .SelectMany(source => source.CustomFields)
            .FirstOrDefault(f => f.Name == fieldName);

        Verify.IsNotNull(documentField, $"Failed to find a document field by name '{fieldName}'");
    }

    Verify.IsNotNull(documentField.Facet, $"Faceted search is not enabled for the field '{fieldName}'");

    Facets.Add(new KeyValuePair<string, DocumentFieldFacet>(fieldName, documentField.Facet));

    bool hasValues = values != null && values.Length > 0;
    bool hasNotValues = notValues != null && notValues.Length > 0;

    if (hasValues || hasNotValues)
    {
        // Single-value facets combine selections with OR; multi-value ones with AND.
        var operation = documentField.Facet.FacetType == FacetType.SingleValue
            ? SearchQuerySelectionOperation.Or
            : SearchQuerySelectionOperation.And;

        Selection.Add(new SearchQuerySelection
        {
            FieldName = fieldName,
            Values = values,
            NotValues = notValues,
            Operation = operation
        });
    }
}
/// <summary>
/// Builds a console search document for the given page version, crawling the page,
/// its placeholder content and its meta data. The url is only built for published pages.
/// </summary>
private SearchDocument FromPage(IPage page, EntityToken entityToken)
{
    string label = string.IsNullOrWhiteSpace(page.MenuTitle) ? page.Title : page.MenuTitle;

    bool isPublished = page.DataSourceId.PublicationScope == PublicationScope.Published;
    string documentId = GetDocumentId(page);

    var builder = new SearchDocumentBuilder();
    builder.SetDataType(typeof(IPage));
    builder.CrawlData(page);

    string url;
    using (new DataConnection(page.DataSourceId.PublicationScope, page.DataSourceId.LocaleScope))
    {
        foreach (var placeholder in PageManager.GetPlaceholderContent(page.Id, page.VersionId))
        {
            builder.CrawlData(placeholder, true);
        }

        try
        {
            foreach (var pageMetaData in page.GetMetaData())
            {
                builder.CrawlData(pageMetaData);
            }
        }
        catch (Exception ex)
        {
            // Meta data crawling is best-effort; a failure must not block indexing the page.
            Log.LogWarning(LogTitle, ex);
        }

        url = isPublished ? PageUrls.BuildUrl(page, UrlKind.Internal) : null;
    }

    return builder.BuildDocument(Name, documentId, label, null, entityToken, url);
}
public async Task AlwaysAppliesDownloadOverrides()
{
    // Use a real DownloadSetComparer so that unchanged packages produce no delta,
    // proving the override is applied regardless of whether downloads changed.
    DownloadSetComparer
        .Setup(c => c.Compare(It.IsAny<DownloadData>(), It.IsAny<DownloadData>()))
        .Returns<DownloadData, DownloadData>((oldData, newData) =>
        {
            var configuration = new Auxiliary2AzureSearchConfiguration();
            var telemetryService = Mock.Of<IAzureSearchTelemetryService>();
            var comparerLogger = Mock.Of<ILogger<DownloadSetComparer>>();
            var optionsSnapshot = new Mock<IOptionsSnapshot<Auxiliary2AzureSearchConfiguration>>();
            optionsSnapshot.Setup(o => o.Value).Returns(configuration);

            var realComparer = new DownloadSetComparer(telemetryService, optionsSnapshot.Object, comparerLogger);
            return realComparer.Compare(oldData, newData);
        });

    // Download override should be applied even if the package's downloads haven't changed.
    OldDownloadData.SetDownloadCount("A", "1.0.0", 1);
    NewDownloadData.SetDownloadCount("A", "1.0.0", 1);
    DownloadOverrides["A"] = 2;

    await Target.ExecuteAsync();

    // Documents should have new data with overriden downloads.
    SearchDocumentBuilder.Verify(
        b => b.UpdateDownloadCount("A", SearchFilters.IncludePrereleaseAndSemVer2, 2),
        Times.Once);

    // Downloads auxiliary file should have new data without overriden downloads.
    DownloadDataClient.Verify(
        c => c.ReplaceLatestIndexedAsync(
            It.Is<DownloadData>(d => d["A"].Total == 1 && d["A"]["1.0.0"] == 1),
            It.IsAny<IAccessCondition>()),
        Times.Once);
}
/// <summary>
/// Builds a search document for the given media file. The title is used as the label
/// with the file name as a fallback; returns <c>null</c> (with a warning logged)
/// when neither is available, since a label is required for indexing.
/// </summary>
private SearchDocument FromMediaFile(IMediaFile mediaFile)
{
    string label = string.IsNullOrWhiteSpace(mediaFile.Title)
        ? mediaFile.FileName
        : mediaFile.Title;

    if (string.IsNullOrEmpty(label))
    {
        Log.LogWarning(nameof(MediaLibraryDocumentSource),
            $"A media file has neither FileName nor Label fields specified, Id: '{mediaFile.Id}', StoreId: '{mediaFile.StoreId}'.");
        return null;
    }

    var builder = new SearchDocumentBuilder(_docBuilderExtensions);
    builder.SetDataType(typeof(IMediaFile));
    builder.CrawlData(mediaFile);

    return builder.BuildDocument(Name, mediaFile.Id.ToString(), label, null, mediaFile.GetDataEntityToken());
}
// Test fixture constructor: wires the full AzureSearchCollectorLogic pipeline together
// using in-memory fakes (blob storage, catalog, registration, documents operations) and
// Moq mocks, so catalog commits can be processed end-to-end with no external services.
public AzureSearchCollectorLogicIntegrationTests(ITestOutputHelper output)
{
    // Commit collector utility configuration: a single concurrent leaf download keeps
    // the test deterministic.
    _utilityConfig = new CommitCollectorConfiguration
    {
        MaxConcurrentCatalogLeafDownloads = 1,
    };
    _utilityOptions = new Mock<IOptionsSnapshot<CommitCollectorConfiguration>>();
    // Lambda-based Returns so later mutations of _utilityConfig are observed.
    _utilityOptions.Setup(x => x.Value).Returns(() => _utilityConfig);

    // Main job configuration; single batch/writer for determinism, fixed storage names.
    _config = new Catalog2AzureSearchConfiguration
    {
        MaxConcurrentBatches = 1,
        MaxConcurrentVersionListWriters = 1,
        StorageContainer = "integration-tests-container",
        StoragePath = "integration-tests-path",
        RegistrationsBaseUrl = "https://example/registrations/",
        GalleryBaseUrl = Data.GalleryBaseUrl,
        FlatContainerBaseUrl = Data.FlatContainerBaseUrl,
        FlatContainerContainerName = Data.FlatContainerContainerName,
        Scoring = new AzureSearchScoringConfiguration()
    };
    _options = new Mock<IOptionsSnapshot<Catalog2AzureSearchConfiguration>>();
    _options.Setup(x => x.Value).Returns(() => _config);

    _developmentConfig = new AzureSearchJobDevelopmentConfiguration();
    _developmentOptions = new Mock<IOptionsSnapshot<AzureSearchJobDevelopmentConfiguration>>();
    _developmentOptions.Setup(x => x.Value).Returns(() => _developmentConfig);

    // Real telemetry services over a mocked telemetry client.
    _telemetryClient = new Mock<ITelemetryClient>();
    _telemetryService = new AzureSearchTelemetryService(_telemetryClient.Object);
    _v3TelemetryService = new V3TelemetryService(_telemetryClient.Object);

    // Mock the database that is used for fetching owner information. The product code only reads
    // from the database so it is less important to have a realistic, stateful implementation.
    _entitiesContextFactory = new Mock<IEntitiesContextFactory>();
    _entitiesContext = new Mock<IEntitiesContext>();
    _entitiesContextFactory.Setup(x => x.CreateAsync(It.IsAny<bool>())).ReturnsAsync(() => _entitiesContext.Object);
    _entitiesContext.Setup(x => x.PackageRegistrations).Returns(DbSetMockFactory.Create<PackageRegistration>());
    _ownerFetcher = new DatabaseAuxiliaryDataFetcher(
        new Mock<ISqlConnectionFactory<GalleryDbConfiguration>>().Object,
        _entitiesContextFactory.Object,
        _telemetryService,
        output.GetLogger<DatabaseAuxiliaryDataFetcher>());

    // In-memory storage and catalog clients back the version list and leaf fetching.
    _cloudBlobClient = new InMemoryCloudBlobClient();
    _versionListDataClient = new VersionListDataClient(
        _cloudBlobClient,
        _options.Object,
        output.GetLogger<VersionListDataClient>());
    _registrationClient = new InMemoryRegistrationClient();
    _catalogClient = new InMemoryCatalogClient();
    _leafFetcher = new CatalogLeafFetcher(
        _registrationClient,
        _catalogClient,
        _options.Object,
        _telemetryService,
        output.GetLogger<CatalogLeafFetcher>());

    // Real document builders feeding the index action builder.
    _baseDocumentBuilder = new BaseDocumentBuilder(_options.Object);
    _search = new SearchDocumentBuilder(_baseDocumentBuilder);
    _hijack = new HijackDocumentBuilder(_baseDocumentBuilder);
    _builder = new CatalogIndexActionBuilder(
        _versionListDataClient,
        _leafFetcher,
        _ownerFetcher,
        _search,
        _hijack,
        output.GetLogger<CatalogIndexActionBuilder>());

    // Search/hijack index clients expose in-memory documents operations for inspection.
    _searchIndex = new Mock<ISearchIndexClientWrapper>();
    _searchDocuments = new InMemoryDocumentsOperations();
    _searchIndex.Setup(x => x.Documents).Returns(() => _searchDocuments);
    _hijackIndex = new Mock<ISearchIndexClientWrapper>();
    _hijackDocuments = new InMemoryDocumentsOperations();
    _hijackIndex.Setup(x => x.Documents).Returns(() => _hijackDocuments);

    _fixUpEvaluator = new DocumentFixUpEvaluator(
        _versionListDataClient,
        _leafFetcher,
        output.GetLogger<DocumentFixUpEvaluator>());

    _commitCollectorUtility = new CommitCollectorUtility(
        _catalogClient,
        _v3TelemetryService,
        _utilityOptions.Object,
        output.GetLogger<CommitCollectorUtility>());

    // System under test; the batch pusher is created per invocation via the factory lambda.
    _collector = new AzureSearchCollectorLogic(
        _builder,
        () => new BatchPusher(
            _searchIndex.Object,
            _hijackIndex.Object,
            _versionListDataClient,
            _options.Object,
            _developmentOptions.Object,
            _telemetryService,
            output.GetLogger<BatchPusher>()),
        _fixUpEvaluator,
        _commitCollectorUtility,
        _options.Object,
        _telemetryService,
        output.GetLogger<AzureSearchCollectorLogic>());
}
// Test fixture constructor: assembles the UpdateDownloadsCommand pipeline with in-memory
// blob containers and mocked search operations so popularity-transfer behavior can be
// verified end-to-end; the indexed batch is captured into _indexedBatch for assertions.
public PopularityTransferIntegrationTests(ITestOutputHelper output)
{
    _featureFlags = new Mock<IFeatureFlagService>();
    _telemetry = new Mock<IAzureSearchTelemetryService>();

    // Job configuration with popularity transfers enabled and fixed container names.
    _config = new Auxiliary2AzureSearchConfiguration
    {
        AuxiliaryDataStorageContainer = "auxiliary-container",
        EnablePopularityTransfers = true,
        StorageContainer = "storage-container",
        Scoring = new AzureSearchScoringConfiguration()
    };
    var options = new Mock<IOptionsSnapshot<Auxiliary2AzureSearchConfiguration>>();
    options
        .Setup(x => x.Value)
        .Returns(_config);

    _developmentConfig = new AzureSearchJobDevelopmentConfiguration();
    var developmentOptions = new Mock<IOptionsSnapshot<AzureSearchJobDevelopmentConfiguration>>();
    developmentOptions
        .Setup(x => x.Value)
        .Returns(_developmentConfig);

    // Auxiliary storage paths read by the AuxiliaryFileClient.
    var auxiliaryConfig = new AuxiliaryDataStorageConfiguration
    {
        AuxiliaryDataStorageContainer = "auxiliary-container",
        AuxiliaryDataStorageDownloadsPath = "downloads.json",
        AuxiliaryDataStorageExcludedPackagesPath = "excludedPackages.json",
    };
    var auxiliaryOptions = new Mock<IOptionsSnapshot<AuxiliaryDataStorageConfiguration>>();
    auxiliaryOptions
        .Setup(x => x.Value)
        .Returns(auxiliaryConfig);

    // In-memory blob containers registered under the names the configuration points at.
    _auxilliaryContainer = new InMemoryCloudBlobContainer();
    _storageContainer = new InMemoryCloudBlobContainer();
    _blobClient = new InMemoryCloudBlobClient();
    _blobClient.Containers["auxiliary-container"] = _auxilliaryContainer;
    _blobClient.Containers["storage-container"] = _storageContainer;

    var auxiliaryFileClient = new AuxiliaryFileClient(
        _blobClient,
        auxiliaryOptions.Object,
        _telemetry.Object,
        output.GetLogger<AuxiliaryFileClient>());

    // The database fetcher serves _newPopularityTransfers, which tests mutate per scenario.
    _newPopularityTransfers = new PopularityTransferData();
    var databaseFetcher = new Mock<IDatabaseAuxiliaryDataFetcher>();
    databaseFetcher
        .Setup(x => x.GetPopularityTransfersAsync())
        .ReturnsAsync(_newPopularityTransfers);

    // Real data clients and comparers over the in-memory blob client.
    var downloadDataClient = new DownloadDataClient(
        _blobClient,
        options.Object,
        _telemetry.Object,
        output.GetLogger<DownloadDataClient>());
    var popularityTransferDataClient = new PopularityTransferDataClient(
        _blobClient,
        options.Object,
        _telemetry.Object,
        output.GetLogger<PopularityTransferDataClient>());
    var versionListDataClient = new VersionListDataClient(
        _blobClient,
        options.Object,
        output.GetLogger<VersionListDataClient>());
    var downloadComparer = new DownloadSetComparer(
        _telemetry.Object,
        options.Object,
        output.GetLogger<DownloadSetComparer>());
    var dataComparer = new DataSetComparer(
        _telemetry.Object,
        output.GetLogger<DataSetComparer>());
    var downloadTransferrer = new DownloadTransferrer(
        dataComparer,
        options.Object,
        output.GetLogger<DownloadTransferrer>());

    var baseDocumentBuilder = new BaseDocumentBuilder(options.Object);
    var searchDocumentBuilder = new SearchDocumentBuilder(baseDocumentBuilder);
    var searchIndexActionBuilder = new SearchIndexActionBuilder(
        versionListDataClient,
        output.GetLogger<SearchIndexActionBuilder>());

    // Capture the batch sent to the search index so tests can assert on its contents.
    _searchOperations = new Mock<IDocumentsOperationsWrapper>();
    _searchOperations
        .Setup(x => x.IndexAsync(It.IsAny<IndexBatch<KeyedDocument>>()))
        .Callback<IndexBatch<KeyedDocument>>(batch => { _indexedBatch = batch; })
        .ReturnsAsync(new DocumentIndexResult());

    var hijackIndexClient = new Mock<ISearchIndexClientWrapper>();
    var searchIndexClient = new Mock<ISearchIndexClientWrapper>();
    searchIndexClient
        .Setup(x => x.Documents)
        .Returns(_searchOperations.Object);

    var batchPusher = new BatchPusher(
        searchIndexClient.Object,
        hijackIndexClient.Object,
        versionListDataClient,
        options.Object,
        developmentOptions.Object,
        _telemetry.Object,
        output.GetLogger<BatchPusher>());
    Func<IBatchPusher> batchPusherFactory = () => batchPusher;

    var time = new Mock<ISystemTime>();
    _featureFlags.Setup(x => x.IsPopularityTransferEnabled()).Returns(true);

    // System under test.
    _target = new UpdateDownloadsCommand(
        auxiliaryFileClient,
        databaseFetcher.Object,
        downloadDataClient,
        downloadComparer,
        downloadTransferrer,
        popularityTransferDataClient,
        searchDocumentBuilder,
        searchIndexActionBuilder,
        batchPusherFactory,
        time.Object,
        _featureFlags.Object,
        options.Object,
        _telemetry.Object,
        output.GetLogger<Auxiliary2AzureSearchCommand>());
}
public async Task AppliesTransferChanges()
{
    // The comparer reports no raw download changes; only transfer changes apply.
    var comparerResult = new SortedDictionary<string, long>(StringComparer.OrdinalIgnoreCase);
    DownloadSetComparer
        .Setup(c => c.Compare(It.IsAny<DownloadData>(), It.IsAny<DownloadData>()))
        .Returns<DownloadData, DownloadData>((oldData, newData) => comparerResult);

    TransferChanges["Package1"] = 100;
    TransferChanges["Package2"] = 200;
    NewTransfers.AddTransfer("Package1", "Package2");

    await Target.ExecuteAsync();

    PopularityTransferDataClient.Verify(
        c => c.ReadLatestIndexedAsync(
            It.Is<IAccessCondition>(x => x.IfMatchETag == null && x.IfNoneMatchETag == null),
            It.IsAny<StringCache>()),
        Times.Once);
    DatabaseFetcher.Verify(
        d => d.GetPopularityTransfersAsync(),
        Times.Once);
    DownloadTransferrer.Verify(
        x => x.UpdateDownloadTransfers(NewDownloadData, comparerResult, OldTransfers, NewTransfers),
        Times.Once);

    // Documents should be updated.
    SearchDocumentBuilder.Verify(
        b => b.UpdateDownloadCount("Package1", SearchFilters.IncludePrereleaseAndSemVer2, 100),
        Times.Once);
    SearchDocumentBuilder.Verify(
        b => b.UpdateDownloadCount("Package2", SearchFilters.IncludePrereleaseAndSemVer2, 200),
        Times.Once);

    // Downloads auxiliary file should not include transfer changes.
    DownloadDataClient.Verify(
        c => c.ReplaceLatestIndexedAsync(
            It.Is<DownloadData>(d => d.Count == 0),
            It.IsAny<IAccessCondition>()),
        Times.Once);

    // Popularity transfers auxiliary file should have new data.
    PopularityTransferDataClient.Verify(
        c => c.ReplaceLatestIndexedAsync(
            It.Is<PopularityTransferData>(d =>
                d.Count == 1 &&
                d["Package1"].Count == 1 &&
                d["Package1"].Contains("Package2")),
            It.IsAny<IAccessCondition>()),
        Times.Once);
}
/// <summary>
/// A download override must only raise a package's total: it is skipped when the actual
/// download count is already greater ("A"), when the package has zero downloads ("C"),
/// and when the package is absent from the download data entirely ("D").
/// </summary>
public async Task DoesNotOverrideIfDownloadsGreaterOrPackageHasNoDownloads()
{
    // The comparer reports every package in the new data as changed.
    DownloadSetComparer
        .Setup(c => c.Compare(It.IsAny<DownloadData>(), It.IsAny<DownloadData>()))
        .Returns<DownloadData, DownloadData>((oldData, newData) =>
        {
            return new SortedDictionary<string, long>(
                newData.ToDictionary(d => d.Key, d => d.Value.Total),
                StringComparer.OrdinalIgnoreCase);
        });

    NewDownloadData.SetDownloadCount("A", "1.0.0", 100);
    NewDownloadData.SetDownloadCount("A", "2.0.0", 200);
    NewDownloadData.SetDownloadCount("B", "3.0.0", 5);
    NewDownloadData.SetDownloadCount("B", "4.0.0", 4);
    NewDownloadData.SetDownloadCount("C", "5.0.0", 0);

    DownloadOverrides["A"] = 55;  // lower than A's actual total of 300 - must be ignored
    DownloadOverrides["C"] = 66;  // C has zero downloads - must be ignored
    DownloadOverrides["D"] = 77;  // D has no download data at all - must be ignored

    await Target.ExecuteAsync();

    // Documents keep their real totals; none of the overrides above are applied.
    // (The original test duplicated the "B" verification; verifying Times.Once twice
    // for the same call is redundant, so the duplicate has been removed.)
    SearchDocumentBuilder.Verify(
        b => b.UpdateDownloadCount("A", SearchFilters.IncludePrereleaseAndSemVer2, 300),
        Times.Once);
    SearchDocumentBuilder.Verify(
        b => b.UpdateDownloadCount("B", SearchFilters.IncludePrereleaseAndSemVer2, 9),
        Times.Once);
    SearchDocumentBuilder.Verify(
        b => b.UpdateDownloadCount("C", It.IsAny<SearchFilters>(), It.IsAny<long>()),
        Times.Never);
    SearchDocumentBuilder.Verify(
        b => b.UpdateDownloadCount("D", It.IsAny<SearchFilters>(), It.IsAny<long>()),
        Times.Never);

    // Downloads auxiliary file should have new data without overriden downloads.
    DownloadDataClient.Verify(
        c => c.ReplaceLatestIndexedAsync(
            It.Is<DownloadData>(d =>
                d.Keys.Count() == 2 &&
                d["A"].Total == 300 &&
                d["A"]["1.0.0"] == 100 &&
                d["A"]["2.0.0"] == 200 &&
                d["B"].Total == 9 &&
                d["B"]["3.0.0"] == 5 &&
                d["B"]["4.0.0"] == 4),
            It.IsAny<IAccessCondition>()),
        Times.Once);
}