Example #1
public async Task UpdateAsync <T>(T item, IGraphRequestContext graphRequestContext) where T : class
 {
     var client = Get <T>(graphRequestContext);
     var idx    = IndexAction.Merge(item);
     await client.Documents.IndexAsync(new IndexBatch <T>(
                                           new List <IndexAction <T> > {
         idx
     }));
 }
Example #2
            [InlineData(4, 8, 15, 0)] // 4 documents per ID: 8 + 4 = 12 would exceed the limit of 10, so the effective batch size is 8 (30 IDs × 4 docs = 120 docs → 15 full pushes, 0 partial).
            public async Task RespectsAzureSearchBatchSize(int documentsPerId, int batchSize, int fullPushes, int partialPushes)
            {
                var changeCount    = 30;
                var expectedPushes = fullPushes + partialPushes;

                Config.AzureSearchBatchSize = 10;

                IndexActions = new IndexActions(
                    new List <IndexAction <KeyedDocument> >(
                        Enumerable
                        .Range(0, documentsPerId)
                        .Select(x => IndexAction.Merge(new KeyedDocument()))),
                    new List <IndexAction <KeyedDocument> >(),
                    new ResultAndAccessCondition <VersionListData>(
                        new VersionListData(new Dictionary <string, VersionPropertiesData>()),
                        new Mock <IAccessCondition>().Object));

                AddChanges(changeCount);

                await Target.ExecuteAsync();

                VerifyCompletedTelemetry(JobOutcome.Success);
                VerifyAllIdsAreProcessed(changeCount);
                IndexActionBuilder.Verify(
                    x => x.UpdateAsync(
                        It.IsAny <string>(),
                        It.IsAny <Func <SearchFilters, KeyedDocument> >()),
                    Times.Exactly(changeCount));
                BatchPusher.Verify(
                    x => x.EnqueueIndexActions(It.IsAny <string>(), It.IsAny <IndexActions>()),
                    Times.Exactly(changeCount));
                BatchPusher.Verify(x => x.TryFinishAsync(), Times.Exactly(expectedPushes));
                BatchPusher.Verify(x => x.TryPushFullBatchesAsync(), Times.Never);
                SystemTime.Verify(x => x.Delay(It.IsAny <TimeSpan>()), Times.Exactly(expectedPushes - 1));
                DownloadDataClient.Verify(
                    x => x.ReplaceLatestIndexedAsync(
                        NewDownloadData,
                        It.Is <IAccessCondition>(a => a.IfMatchETag == OldDownloadResult.Metadata.ETag)),
                    Times.Once);

                Assert.Equal(
                    fullPushes,
                    FinishedBatches.Count(b => b.Sum(ia => ia.Search.Count) == batchSize));
                Assert.Equal(
                    partialPushes,
                    FinishedBatches.Count(b => b.Sum(ia => ia.Search.Count) != batchSize));
                Assert.Empty(CurrentBatch);
            }
Example #3
        static void Main(string[] args)
        {
            bool clear = false;

            SearchServiceClient client = new SearchServiceClient(ConfigurationManager.AppSettings["AzureSearchName"],
                                                                 new SearchCredentials(ConfigurationManager.AppSettings["AzureSearchKey"]));
            SearchIndexClient indexClient = client.Indexes.GetClient("restaurant");

            AzureMLRecommendations recos = new AzureMLRecommendations();

            recos.Init(ConfigurationManager.AppSettings["RecoUser"], ConfigurationManager.AppSettings["RecoKey"]);
            string modelId = ConfigurationManager.AppSettings["RecoModelId"];

            List <IndexAction> actions = new List <IndexAction>();

            foreach (int id in GetCurrentRestaurantIds())
            {
                List <string> recommendedIds = new List <string>();
                if (!clear)
                {
                    var r = recos.GetRecommendation(modelId, new List <string> {
                        id.ToString()
                    }, 3);
                    recommendedIds.AddRange(r.Select(i => i.Id));
                }
                actions.Add(IndexAction.Merge(new Document {
                    { "RestaurantId", id.ToString() }, { "RecommendedIds", recommendedIds }
                }));
            }

            // Assume fewer than 1,000 actions; otherwise the work would need to be split into batches of 1,000 (see the sketch after this example).
            try
            {
                DocumentIndexResult indexResult = indexClient.Documents.Index(new IndexBatch(actions));
                int succeeded = indexResult.Results.Where(r => r.Succeeded).Count();
                Console.WriteLine($"Indexing completed. Items: {indexResult.Results.Count}, Succeeded: {succeeded}");
            }
            catch (IndexBatchException ex)
            {
                // Sometimes when your Search service is under load, indexing will fail for some of the documents in
                // the batch. Depending on your application, you can take compensating actions like delaying and
                // retrying. For this simple demo, we just log the failed document keys and continue.
                Console.WriteLine(
                    "Failed to index some of the documents: {0}",
                    string.Join(", ", ex.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
            }
        }
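The comment in the example above assumes fewer than 1,000 actions per batch. Below is a minimal sketch of how the same call could be split into chunks of at most 1,000 actions; it refers to the actions and indexClient variables from the example, and the chunking loop itself is an illustration rather than part of the original sample.

        // Sketch only: split the list of actions into chunks of at most 1,000 and index each chunk.
        // `indexClient` and `actions` are the variables built in the example above.
        const int maxBatchSize = 1000;
        for (int offset = 0; offset < actions.Count; offset += maxBatchSize)
        {
            var chunk = actions.Skip(offset).Take(maxBatchSize).ToList();
            try
            {
                DocumentIndexResult chunkResult = indexClient.Documents.Index(new IndexBatch(chunk));
                int succeededInChunk = chunkResult.Results.Count(r => r.Succeeded);
                Console.WriteLine($"Chunk at offset {offset}: {succeededInChunk}/{chunkResult.Results.Count} succeeded.");
            }
            catch (IndexBatchException ex)
            {
                // Same handling as in the example: log the failed keys and continue with the next chunk.
                Console.WriteLine(
                    "Failed to index some of the documents: {0}",
                    string.Join(", ", ex.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
            }
        }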
Example #4
        private IndexAction <KeyedDocument> GetHijackIndexAction(
            Context context,
            NuGetVersion version,
            HijackDocumentChanges changes)
        {
            IndexAction <KeyedDocument> indexAction;

            if (changes.Delete)
            {
                indexAction = IndexAction.Delete(_hijack.Keyed(
                                                     context.PackageId,
                                                     version.ToNormalizedString()));
            }
            else if (!changes.UpdateMetadata)
            {
                indexAction = IndexAction.Merge <KeyedDocument>(_hijack.LatestFromCatalog(
                                                                    context.PackageId,
                                                                    version.ToNormalizedString(),
                                                                    lastCommitTimestamp: context.LastCommitTimestamp,
                                                                    lastCommitId: context.LastCommitId,
                                                                    changes: changes));
            }
            else
            {
                var leaf = context.GetLeaf(version);
                var normalizedVersion = VerifyConsistencyAndNormalizeVersion(context, leaf);

                indexAction = IndexAction.MergeOrUpload <KeyedDocument>(_hijack.FullFromCatalog(
                                                                            normalizedVersion,
                                                                            changes,
                                                                            leaf));
            }

            _logger.LogInformation(
                "Hijack index action prepared for {PackageId} {PackageVersion}: {IndexAction} with a {DocumentType} document.",
                context.PackageId,
                version.ToNormalizedString(),
                indexAction.ActionType,
                indexAction.Document.GetType().FullName);

            return(indexAction);
        }
Example #5
            public async Task Hijack404FailureIsNotApplicable()
            {
                IndexingResults.Add(new IndexingResult(key: "hijack-doc", statusCode: 404));
                AllIndexActions.Add(new IdAndValue <IndexActions>(
                                        "NuGet.Versioning",
                                        new IndexActions(
                                            search: new List <IndexAction <KeyedDocument> >(),
                                            hijack: new List <IndexAction <KeyedDocument> >
                {
                    IndexAction.Merge(new KeyedDocument {
                        Key = "hijack-doc"
                    }),
                },
                                            versionListDataResult: new ResultAndAccessCondition <VersionListData>(
                                                new VersionListData(new Dictionary <string, VersionPropertiesData>()),
                                                Mock.Of <IAccessCondition>()))));

                var result = await Target.TryFixUpAsync(ItemList, AllIndexActions, Exception);

                Assert.False(result.Applicable);
            }
Example #6
        private IndexAction <KeyedDocument> GetSearchIndexAction(
            Context context,
            SearchFilters searchFilters,
            SearchIndexChangeType changeType,
            string[] owners)
        {
            var latestFlags = _search.LatestFlagsOrNull(context.VersionLists, searchFilters);

            Guard.Assert(
                changeType == SearchIndexChangeType.Delete || latestFlags != null,
                "Either the search document is being deleted or there is a latest version.");

            IndexAction <KeyedDocument> indexAction;

            if (changeType == SearchIndexChangeType.Delete)
            {
                indexAction = IndexAction.Delete(_search.Keyed(
                                                     context.PackageId,
                                                     searchFilters));
            }
            else if (changeType == SearchIndexChangeType.UpdateVersionList)
            {
                if (owners != null)
                {
                    // If we have owner information already fetched on behalf of another search document, send the
                    // latest owner information as well. This provides two benefits:
                    //
                    //   1. This keeps all search documents for a package ID in-sync with regards to their owners
                    //      fields.
                    //
                    //   2. This means if an admin is reflowing for the purposes of fixing up owner information, all
                    //      search documents get the benefit instead of having to reflow the latest version of each
                    //      search filter.
                    //
                    indexAction = IndexAction.Merge <KeyedDocument>(_search.UpdateVersionListAndOwnersFromCatalog(
                                                                        context.PackageId,
                                                                        searchFilters,
                                                                        lastCommitTimestamp: context.LastCommitTimestamp,
                                                                        lastCommitId: context.LastCommitId,
                                                                        versions: latestFlags.LatestVersionInfo.ListedFullVersions,
                                                                        isLatestStable: latestFlags.IsLatestStable,
                                                                        isLatest: latestFlags.IsLatest,
                                                                        owners: owners));
                }
                else
                {
                    indexAction = IndexAction.Merge <KeyedDocument>(_search.UpdateVersionListFromCatalog(
                                                                        context.PackageId,
                                                                        searchFilters,
                                                                        lastCommitTimestamp: context.LastCommitTimestamp,
                                                                        lastCommitId: context.LastCommitId,
                                                                        versions: latestFlags.LatestVersionInfo.ListedFullVersions,
                                                                        isLatestStable: latestFlags.IsLatestStable,
                                                                        isLatest: latestFlags.IsLatest));
                }
            }
            else if (IsUpdateLatest(changeType))
            {
                var leaf = context.GetLeaf(latestFlags.LatestVersionInfo.ParsedVersion);
                var normalizedVersion = VerifyConsistencyAndNormalizeVersion(context, leaf);
                indexAction = IndexAction.MergeOrUpload <KeyedDocument>(_search.UpdateLatestFromCatalog(
                                                                            searchFilters,
                                                                            latestFlags.LatestVersionInfo.ListedFullVersions,
                                                                            latestFlags.IsLatestStable,
                                                                            latestFlags.IsLatest,
                                                                            normalizedVersion,
                                                                            latestFlags.LatestVersionInfo.FullVersion,
                                                                            leaf,
                                                                            owners));
            }
            else
            {
                throw new NotImplementedException($"The change type '{changeType}' is not supported.");
            }

            _logger.LogInformation(
                "Search index action prepared for {PackageId} {SearchFilters}: {IndexAction} with a {DocumentType} document.",
                context.PackageId,
                searchFilters,
                indexAction.ActionType,
                indexAction.Document.GetType().FullName);

            return(indexAction);
        }
Example #7
            public async Task Search404MergeFailureIsApplicable()
            {
                ItemList.Add(new CatalogCommitItem(
                                 new Uri("https://example/catalog/0.json"),
                                 "commit-id-a",
                                 new DateTime(2020, 3, 16, 12, 5, 0, DateTimeKind.Utc),
                                 new string[0],
                                 new[] { Schema.DataTypes.PackageDetails },
                                 new PackageIdentity("NuGet.Frameworks", NuGetVersion.Parse("1.0.0"))));
                ItemList.Add(new CatalogCommitItem(
                                 new Uri("https://example/catalog/1.json"),
                                 "commit-id-a",
                                 new DateTime(2020, 3, 16, 12, 5, 0, DateTimeKind.Utc),
                                 new string[0],
                                 new[] { Schema.DataTypes.PackageDetails },
                                 new PackageIdentity("NuGet.Versioning", NuGetVersion.Parse("0.9.0-beta.1"))));

                IndexingResults.Add(new IndexingResult(key: "search-doc", statusCode: 404));
                AllIndexActions.Add(new IdAndValue <IndexActions>(
                                        "NuGet.Versioning",
                                        new IndexActions(
                                            search: new List <IndexAction <KeyedDocument> >
                {
                    IndexAction.Merge(new KeyedDocument {
                        Key = "search-doc"
                    }),
                },
                                            hijack: new List <IndexAction <KeyedDocument> >(),
                                            versionListDataResult: new ResultAndAccessCondition <VersionListData>(
                                                new VersionListData(new Dictionary <string, VersionPropertiesData>()),
                                                Mock.Of <IAccessCondition>()))));
                VersionListClient
                .Setup(x => x.ReadAsync(It.IsAny <string>()))
                .ReturnsAsync(() => new ResultAndAccessCondition <VersionListData>(
                                  new VersionListData(new Dictionary <string, VersionPropertiesData>
                {
                    { "1.0.0", new VersionPropertiesData(listed: true, semVer2: false) },
                }),
                                  Mock.Of <IAccessCondition>()));
                var leaf = new PackageDetailsCatalogLeaf
                {
                    Url             = "https://example/catalog/2.json",
                    CommitId        = "commit-id",
                    CommitTimestamp = new DateTimeOffset(2020, 3, 17, 12, 5, 0, TimeSpan.Zero),
                    Type            = CatalogLeafType.PackageDetails,
                };

                LeafFetcher
                .Setup(x => x.GetLatestLeavesAsync(
                           It.IsAny <string>(),
                           It.IsAny <IReadOnlyList <IReadOnlyList <NuGetVersion> > >()))
                .ReturnsAsync(() => new LatestCatalogLeaves(
                                  new HashSet <NuGetVersion>(),
                                  new Dictionary <NuGetVersion, PackageDetailsCatalogLeaf>
                {
                    { NuGetVersion.Parse("1.0.0"), leaf },
                }));

                var result = await Target.TryFixUpAsync(ItemList, AllIndexActions, Exception);

                Assert.True(result.Applicable, "The fix up should be applicable.");
                Assert.Equal(3, result.ItemList.Count);
                Assert.Empty(ItemList.Except(result.ItemList));

                var addedItem = Assert.Single(result.ItemList.Except(ItemList));

                Assert.Equal(leaf.Url, addedItem.Uri.AbsoluteUri);
                Assert.Equal(leaf.CommitId, addedItem.CommitId);
                Assert.Equal(leaf.CommitTimestamp, addedItem.CommitTimeStamp);
                Assert.Empty(addedItem.Types);
                Assert.Equal(Schema.DataTypes.PackageDetails, Assert.Single(addedItem.TypeUris));
                Assert.Equal(new PackageIdentity("NuGet.Versioning", NuGetVersion.Parse("1.0.0")), addedItem.PackageIdentity);
                Assert.True(addedItem.IsPackageDetails, "The generated item should be a package details item.");
                Assert.False(addedItem.IsPackageDelete, "The generated item should not be a package delete item.");
            }
Example #8
        public void CanIndexDynamicDocuments()
        {
            Run(() =>
            {
                SearchIndexClient client = Data.GetSearchIndexClient();

                var batch = IndexBatch.New(new[]
                {
                    IndexAction.Upload(
                        new Document()
                    {
                        { "hotelId", "1" },
                        { "baseRate", 199.0 },
                        { "description", "Best hotel in town" },
                        { "descriptionFr", "Meilleur hôtel en ville" },
                        { "hotelName", "Fancy Stay" },
                        { "category", "Luxury" },
                        { "tags", new[] { "pool", "view", "wifi", "concierge" } },
                        { "parkingIncluded", false },
                        { "smokingAllowed", false },
                        { "lastRenovationDate", new DateTimeOffset(2010, 6, 27, 0, 0, 0, TimeSpan.FromHours(-8)) },
                        { "rating", 5 },
                        { "location", GeographyPoint.Create(47.678581, -122.131577) }
                    }),
                    IndexAction.Upload(
                        new Document()
                    {
                        { "hotelId", "2" },
                        { "baseRate", 79.99 },
                        { "description", "Cheapest hotel in town" },
                        { "descriptionFr", "Hôtel le moins cher en ville" },
                        { "hotelName", "Roach Motel" },
                        { "category", "Budget" },
                        { "tags", new[] { "motel", "budget" } },
                        { "parkingIncluded", true },
                        { "smokingAllowed", true },
                        { "lastRenovationDate", new DateTimeOffset(1982, 4, 28, 0, 0, 0, TimeSpan.Zero) },      //aka.ms/sre-codescan/disable
                        { "rating", 1 },
                        { "location", GeographyPoint.Create(49.678581, -122.131577) }
                    }),
                    IndexAction.Merge(
                        new Document()
                    {
                        { "hotelId", "3" },
                        { "baseRate", 279.99 },
                        { "description", "Surprisingly expensive" },
                        { "lastRenovationDate", null }
                    }),
                    IndexAction.Delete(keyName: "hotelId", keyValue: "4"),
                    IndexAction.MergeOrUpload(
                        new Document()
                    {
                        { "hotelId", "5" },
                        { "baseRate", Double.NaN },
                        { "hotelName", null },
                        { "tags", new string[0] }
                    })
                });

                IndexBatchException e = Assert.Throws <IndexBatchException>(() => client.Documents.Index(batch));
                AssertIsPartialFailure(e, "3");

                Assert.Equal(5, e.IndexingResults.Count);

                AssertIndexActionSucceeded("1", e.IndexingResults[0], 201);
                AssertIndexActionSucceeded("2", e.IndexingResults[1], 201);
                AssertIndexActionFailed("3", e.IndexingResults[2], "Document not found.", 404);
                AssertIndexActionSucceeded("4", e.IndexingResults[3], 200);
                AssertIndexActionSucceeded("5", e.IndexingResults[4], 201);

                SearchTestUtilities.WaitForIndexing();

                Assert.Equal(3L, client.Documents.Count());
            });
        }
Example #9
        public void CanIndexStaticallyTypedDocuments()
        {
            Run(() =>
            {
                SearchIndexClient client = Data.GetSearchIndexClient();

                var batch = IndexBatch.New(new[]
                {
                    IndexAction.Upload(
                        new Hotel()
                    {
                        HotelId            = "1",
                        BaseRate           = 199.0,
                        Description        = "Best hotel in town",
                        DescriptionFr      = "Meilleur hôtel en ville",
                        HotelName          = "Fancy Stay",
                        Category           = "Luxury",
                        Tags               = new[] { "pool", "view", "wifi", "concierge" },
                        ParkingIncluded    = false,
                        SmokingAllowed     = false,
                        LastRenovationDate = new DateTimeOffset(2010, 6, 27, 0, 0, 0, TimeSpan.FromHours(-8)),
                        Rating             = 5,
                        Location           = GeographyPoint.Create(47.678581, -122.131577)
                    }),
                    IndexAction.Upload(
                        new Hotel()
                    {
                        HotelId            = "2",
                        BaseRate           = 79.99,
                        Description        = "Cheapest hotel in town",
                        DescriptionFr      = "Hôtel le moins cher en ville",
                        HotelName          = "Roach Motel",
                        Category           = "Budget",
                        Tags               = new[] { "motel", "budget" },
                        ParkingIncluded    = true,
                        SmokingAllowed     = true,
                        LastRenovationDate = new DateTimeOffset(1982, 4, 28, 0, 0, 0, TimeSpan.Zero),       //aka.ms/sre-codescan/disable
                        Rating             = 1,
                        Location           = GeographyPoint.Create(49.678581, -122.131577)
                    }),
                    IndexAction.Merge(
                        new Hotel()
                    {
                        HotelId            = "3",
                        BaseRate           = 279.99,
                        Description        = "Surprisingly expensive",
                        LastRenovationDate = null
                    }),
                    IndexAction.Delete(new Hotel()
                    {
                        HotelId = "4"
                    }),
                    IndexAction.MergeOrUpload(
                        new Hotel()
                    {
                        HotelId   = "5",
                        BaseRate  = Double.NaN,
                        HotelName = null,
                        Tags      = new string[0]
                    })
                });

                IndexBatchException e = Assert.Throws <IndexBatchException>(() => client.Documents.Index(batch));
                AssertIsPartialFailure(e, "3");

                Assert.Equal(5, e.IndexingResults.Count);

                AssertIndexActionSucceeded("1", e.IndexingResults[0], 201);
                AssertIndexActionSucceeded("2", e.IndexingResults[1], 201);
                AssertIndexActionFailed("3", e.IndexingResults[2], "Document not found.", 404);
                AssertIndexActionSucceeded("4", e.IndexingResults[3], 200);
                AssertIndexActionSucceeded("5", e.IndexingResults[4], 201);

                SearchTestUtilities.WaitForIndexing();

                Assert.Equal(3, client.Documents.Count());
            });
        }
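Both indexing tests above build Hotel objects whose shape is only implied by the object initializers. The sketch below reconstructs a plausible model from the property names and values used in the test; the nullability choices and the omission of field attributes (key, searchable, filterable, and so on) are assumptions.

        // Sketch of a Hotel model matching the initializers above; GeographyPoint comes from Microsoft.Spatial.
        // The real test model also carries field attributes that are omitted here as assumptions.
        public class Hotel
        {
            public string HotelId { get; set; }
            public double? BaseRate { get; set; }
            public string Description { get; set; }
            public string DescriptionFr { get; set; }
            public string HotelName { get; set; }
            public string Category { get; set; }
            public string[] Tags { get; set; }
            public bool? ParkingIncluded { get; set; }
            public bool? SmokingAllowed { get; set; }
            public DateTimeOffset? LastRenovationDate { get; set; }
            public int? Rating { get; set; }
            public GeographyPoint Location { get; set; }
        }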
Example #10
        public static async Task CleanExpiredRecordsAsync(
            SearchIndexClient searchIndexClient,
            EkProductSourceEnum productSource,
            long lastUpdateUtcTimestamp,
            PriceListSynchronizationStatistics statistics,
            ILogger logger)
        {
            const int BatchSize        = 1_000;
            string    lastProcessedKey = null;

            while (true)
            {
                var filterBuilder = new StringBuilder();
                filterBuilder.Append($"source eq {(int)productSource} and updatedOnUtcTimestamp ne {lastUpdateUtcTimestamp}");
                if (lastProcessedKey != null)
                {
                    filterBuilder.Append($" and key gt '{lastProcessedKey}'");
                }

                var searchParameters = new SearchParameters()
                {
                    Top    = BatchSize,
                    Filter = filterBuilder.ToString(),
                    Select = new List <string>()
                    {
                        "key"
                    },
                    OrderBy = new List <string>()
                    {
                        "key asc",
                    },
                };

                var searchResult = await searchIndexClient.Documents.SearchAsync(null, searchParameters);

                var documents = searchResult.Results
                                .Select(x => x.Document)
                                .ToArray();

                if (documents.Length > 0)
                {
                    // remove records
                    var indexBatch = IndexBatch.New(documents.Select(x => IndexAction.Merge(x)));
                    try
                    {
                        await searchIndexClient.Documents.IndexAsync(indexBatch);
                    }
                    catch (IndexBatchException ex)
                    {
                        var reasons = string.Join(", ", ex.IndexingResults
                                                  .Where(x => !x.Succeeded)
                                                  .Select(x => $"{x.Key}-{x.ErrorMessage}"));
                        var message = $"Failed to index products: {reasons}.";

                        throw new InvalidOperationException(message);
                    }

                    // use last processed key as additional filter since indexing takes some time
                    lastProcessedKey = (string)documents.Last()["key"];

                    logger.LogInformation(LoggingEvents.Synchronization, $"Deleted: {documents.Length}.");
                    statistics.Removed += documents.Length;
                }
                else
                {
                    logger.LogInformation(LoggingEvents.Synchronization, "No expired records.");

                    break;
                }
            }
        }
Example #11
        public async Task SearchForAbsentThumbnailsAsync()
        {
            var totalProcessed      = 0;
            var totalThumbnailFound = 0;

            try
            {
                const int BatchSize = 10;
                using (var indexClient = _ekSearchManagementClient.CreateSearchIndexClient())
                {
                    indexClient.IndexName = _ekSearchManagementClient.ProductsIndexName;

                    string lastProcessedKey = null;
                    while (true)
                    {
                        var filterBuilder = new StringBuilder();
                        filterBuilder.Append("isThumbnailSearchProvided eq null and thumbnailUrl eq null");
                        if (lastProcessedKey != null)
                        {
                            filterBuilder.Append($" and key gt '{lastProcessedKey}'");
                        }

                        var searchParameters = new SearchParameters()
                        {
                            Top     = BatchSize,
                            Filter  = filterBuilder.ToString(),
                            OrderBy = new List <string>()
                            {
                                "key asc",
                            },
                        };

                        var searchResult = await indexClient.Documents.SearchAsync <IndexProduct>(null, searchParameters);

                        var records = searchResult.Results
                                      .Select(x => x.Document)
                                      .ToArray();

                        if (records.Length > 0)
                        {
                            // search for thumbnails
                            var searchTasks = records.Select(record =>
                                                             Task.Run(async() =>
                            {
                                var document = new Document()
                                {
                                    ["key"] = record.Key,
                                };

                                var fileInfo = await GetPartNumberThumbnailAsync(
                                    brandName: record.BrandName,
                                    partNumber: record.PartNumber);
                                if (fileInfo?.Url != null)
                                {
                                    document["thumbnailUrl"] = fileInfo.Url;

                                    Interlocked.Increment(ref totalThumbnailFound);
                                }

                                document["isThumbnailSearchProvided"] = true;

                                Interlocked.Increment(ref totalProcessed);

                                return(document);
                            }));

                            var documents = await Task.WhenAll(searchTasks);

                            // index flags and thumbnail urls if found
                            var indexBatch = IndexBatch.New(documents.Select(x => IndexAction.Merge(x)));
                            try
                            {
                                await indexClient.Documents.IndexAsync(indexBatch);
                            }
                            catch (IndexBatchException ex)
                            {
                                var reasons = string.Join(", ", ex.IndexingResults
                                                          .Where(x => !x.Succeeded)
                                                          .Select(x => $"{x.Key}-{x.ErrorMessage}"));
                                var message = $"Failed to index products: {reasons}.";

                                throw new InvalidOperationException(message);
                            }

                            // use last processed key as additional filter since indexing takes some time
                            lastProcessedKey = records.Last().Key;

                            var foundPercentage = (decimal)totalThumbnailFound / totalProcessed;
                            _logger.LogInformation($"Processed: {totalProcessed}, found: {totalThumbnailFound}, {foundPercentage:P}.");
                        }
                        else
                        {
                            _logger.LogInformation("No more records.");

                            break;
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                _logger.LogError(LoggingEvents.UnhandledException, ex, null);
                await _notificationManager.SendWorkerMessageAsync($"Search for absent thumbnails failed, processed: {totalProcessed}, found: {totalThumbnailFound}, {ex.Message}");

                throw;
            }

            await _notificationManager.SendWorkerMessageAsync($"Search for absent thumbnails succeeded, processed: {totalProcessed}, found: {totalThumbnailFound}.");
        }
Example #12
            public Facts(ITestOutputHelper output)
            {
                DatabaseOwnerFetcher     = new Mock <IDatabaseAuxiliaryDataFetcher>();
                OwnerDataClient          = new Mock <IOwnerDataClient>();
                OwnerSetComparer         = new Mock <IDataSetComparer>();
                SearchDocumentBuilder    = new Mock <ISearchDocumentBuilder>();
                SearchIndexActionBuilder = new Mock <ISearchIndexActionBuilder>();
                Pusher           = new Mock <IBatchPusher>();
                Options          = new Mock <IOptionsSnapshot <AzureSearchJobConfiguration> >();
                TelemetryService = new Mock <IAzureSearchTelemetryService>();
                Logger           = output.GetLogger <UpdateOwnersCommand>();

                Configuration = new AzureSearchJobConfiguration
                {
                    MaxConcurrentBatches = 1,
                };
                DatabaseResult = new SortedDictionary <string, SortedSet <string> >();
                StorageResult  = new ResultAndAccessCondition <SortedDictionary <string, SortedSet <string> > >(
                    new SortedDictionary <string, SortedSet <string> >(),
                    new Mock <IAccessCondition>().Object);
                Changes      = new SortedDictionary <string, string[]>();
                IndexActions = new IndexActions(
                    new List <IndexAction <KeyedDocument> > {
                    IndexAction.Merge(new KeyedDocument())
                },
                    new List <IndexAction <KeyedDocument> > {
                    IndexAction.Merge(new KeyedDocument())
                },
                    new ResultAndAccessCondition <VersionListData>(
                        new VersionListData(new Dictionary <string, VersionPropertiesData>()),
                        new Mock <IAccessCondition>().Object));

                Pusher.SetReturnsDefault(Task.FromResult(new BatchPusherResult()));
                Options
                .Setup(x => x.Value)
                .Returns(() => Configuration);
                DatabaseOwnerFetcher
                .Setup(x => x.GetPackageIdToOwnersAsync())
                .ReturnsAsync(() => DatabaseResult);
                OwnerDataClient
                .Setup(x => x.ReadLatestIndexedAsync())
                .ReturnsAsync(() => StorageResult);
                OwnerSetComparer
                .Setup(x => x.CompareOwners(
                           It.IsAny <SortedDictionary <string, SortedSet <string> > >(),
                           It.IsAny <SortedDictionary <string, SortedSet <string> > >()))
                .Returns(() => Changes);
                SearchIndexActionBuilder
                .Setup(x => x.UpdateAsync(It.IsAny <string>(), It.IsAny <Func <SearchFilters, KeyedDocument> >()))
                .ReturnsAsync(() => IndexActions);

                Target = new UpdateOwnersCommand(
                    DatabaseOwnerFetcher.Object,
                    OwnerDataClient.Object,
                    OwnerSetComparer.Object,
                    SearchDocumentBuilder.Object,
                    SearchIndexActionBuilder.Object,
                    () => Pusher.Object,
                    Options.Object,
                    TelemetryService.Object,
                    Logger);
            }
Example #13
        public async Task <IndexActions> UpdateAsync(string packageId, Func <SearchFilters, KeyedDocument> buildDocument)
        {
            var versionListDataResult = await _versionListDataClient.ReadAsync(packageId);

            var versionLists = new VersionLists(versionListDataResult.Result);

            /// Update all of the search documents that exist for this package ID with the provided document builder.
            /// Here are some examples of different search filter combinations that could occur.
            ///
            /// Example #1: 1.0.0 (listed)
            ///
            ///   A stable SemVer 1.0.0 package matches all search filters, so one index action will be produced for
            ///   each search document. That is four in total. All of these search documents have the same latest
            ///   version: 1.0.0.
            ///
            /// Example #2: 1.0.0 (unlisted), 2.0.0 (unlisted)
            ///
            ///   There are no search documents at all in this case since there is no listed version. No index actions
            ///   are produced in this case.
            ///
            /// Example #3: 1.0.0-beta (listed), 2.0.0-beta.1 (listed)
            ///
            ///   All of the versions are prerelease so there are no search documents for "stable" search filters. There
            ///   two search documents to be updated, one for <see cref="SearchFilters.IncludePrerelease"/> and one for
            ///   <see cref="SearchFilters.IncludePrereleaseAndSemVer2"/>. The latest version for each of these two
            ///   documents is different.
            var search        = new List <IndexAction <KeyedDocument> >();
            var searchFilters = new List <SearchFilters>();

            foreach (var searchFilter in DocumentUtilities.AllSearchFilters)
            {
                // Determine if there is a document for this ID and search filter.
                if (versionLists.GetLatestVersionInfoOrNull(searchFilter) == null)
                {
                    continue;
                }

                var document    = buildDocument(searchFilter);
                var indexAction = IndexAction.Merge(document);
                search.Add(indexAction);
                searchFilters.Add(searchFilter);
            }

            _logger.LogInformation(
                "Package ID {PackageId} has {Count} search document changes for search filters: {SearchFilters}",
                packageId,
                searchFilters.Count,
                searchFilters);

            // No changes are made to the hijack index.
            var hijack = new List <IndexAction <KeyedDocument> >();

            // We never make any change to the version list but still want to push it back to storage. This will give
            // us an etag mismatch if the version list has changed. This is good because if the version list has
            // changed it's possible there is another search document that we have to update. If we didn't do this,
            // then a race condition could occur where one of the search documents for an ID would not get an update.
            var newVersionListDataResult = versionListDataResult;

            return(new IndexActions(
                       search,
                       hijack,
                       newVersionListDataResult));
        }
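The tests earlier in this listing mock this method through ISearchIndexActionBuilder. A hedged usage sketch of how the returned IndexActions might flow into a batch pusher follows; the _indexActionBuilder and _batchPusher fields and the empty KeyedDocument are illustrative assumptions, not taken from the original code.

        // Sketch only: the builder yields one merge action per search filter that currently has a
        // latest version for the package ID; the document built here is deliberately empty.
        IndexActions indexActions = await _indexActionBuilder.UpdateAsync(
            "NuGet.Versioning",
            searchFilter => new KeyedDocument());

        _batchPusher.EnqueueIndexActions("NuGet.Versioning", indexActions);
        await _batchPusher.TryFinishAsync();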
Example #14
        /// <inheritdoc/>
        public async Task <bool> ChangeDocumentsInIndexAsync <T>(SortedDictionary <T, IndexActionType> changedDocuments, ILogger logger = null)
            where T : class
        {
            Argument.EnsureNotEmpty(nameof(changedDocuments), changedDocuments);

            if (logger == null)
            {
                logger = NullLogger.Instance;
            }

            await EnsureSearchIndexAsync <T>(logger);

            var index        = GetIndex <T>();
            var indexActions = new List <IndexAction <T> >();

            foreach (var keyValuePair in changedDocuments)
            {
                IndexAction <T> indexAction = null;

                switch (keyValuePair.Value)
                {
                case IndexActionType.Upload:
                    indexAction = IndexAction.Upload(keyValuePair.Key);
                    break;

                case IndexActionType.Delete:
                    indexAction = IndexAction.Delete(keyValuePair.Key);
                    break;

                case IndexActionType.Merge:
                    indexAction = IndexAction.Merge(keyValuePair.Key);
                    break;

                default:
                    indexAction = IndexAction.MergeOrUpload(keyValuePair.Key);
                    break;
                }

                indexActions.Add(indexAction);
            }

            var batch       = IndexBatch.New(indexActions);
            var indexClient = SearchClient.Value.Indexes.GetClient(index);

            try
            {
                var documentIndexResult = await indexClient.Documents.IndexAsync(batch);

                return(documentIndexResult.Results != null && documentIndexResult.Results.Count == changedDocuments.Count());
            }
            catch (IndexBatchException e)
            {
                // Sometimes when your Search service is under load, indexing will fail for some of the documents in
                // the batch. Depending on your application, you can take compensating actions like delaying and
                // retrying. For this simple demo, we just log the failed document keys and continue.
                logger.Log(TraceEventType.Error, e, null, "Failed to index some of the documents: {0}",
                           string.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
            }
            catch (Exception e)
            {
                logger.Log(TraceEventType.Error, e, null, "Search index failed");
            }

            return(false);
        }
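The catch block above only logs the failed keys. One compensating action mentioned in that comment is to delay and then retry just the failed documents. The sketch below assumes the Microsoft.Azure.Search version in use exposes IndexBatchException.FindFailedActionsToRetry and that a key selector for T is available; the helper method, its name, and the fixed two-second delay are all illustrative.

        // Sketch only: resubmit the failed subset of a batch once after a short delay.
        // `keySelector` must map a document to its key value and is a placeholder here.
        private static async Task RetryFailedOnceAsync<T>(
            ISearchIndexClient indexClient,
            IndexBatch<T> originalBatch,
            IndexBatchException exception,
            Func<T, string> keySelector) where T : class
        {
            IndexBatch<T> retryBatch = exception.FindFailedActionsToRetry(originalBatch, keySelector);
            if (!retryBatch.Actions.Any())
            {
                return;
            }

            await Task.Delay(TimeSpan.FromSeconds(2)); // crude fixed backoff; tune for the workload
            await indexClient.Documents.IndexAsync(retryBatch);
        }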
Example #15
        public async Task <ServiceResult <bool> > ChangeDocumentsInIndex <T>(IEnumerable <T> documents, IEnumerable <AzureSearchIndexType> crudTypes,
                                                                             string indexName = null) where T : class
        {
            var serviceResult = new ServiceResult <bool>();

            indexName = GetIndexName <T>(indexName);

            var searchIndexCreateServiceResult = await CreateSearchIndex <T>(indexName);

            if (!searchIndexCreateServiceResult.IsStatusOk())
            {
                return(serviceResult.CopyStatus(searchIndexCreateServiceResult));
            }

            var indexActions = new List <IndexAction <T> >();

            var documentCounter = 0;

            foreach (var document in documents)
            {
                var             crudType    = AzureSearchIndexType.Upload;
                IndexAction <T> indexAction = null;

                if (crudTypes.Count() > documentCounter)
                {
                    crudType = crudTypes.ElementAt(documentCounter);
                }
                else
                {
                    crudType = crudTypes.First();
                }

                switch (crudType)
                {
                case AzureSearchIndexType.Upload:
                    indexAction = IndexAction.Upload(document);
                    break;

                case AzureSearchIndexType.Delete:
                    indexAction = IndexAction.Delete(document);
                    break;

                case AzureSearchIndexType.Merge:
                    indexAction = IndexAction.Merge(document);
                    break;

                default:
                    indexAction = IndexAction.MergeOrUpload(document);
                    break;
                }

                indexActions.Add(indexAction);

                documentCounter++;
            }

            var batch       = IndexBatch.New(indexActions);
            var indexClient = _serviceClient.Indexes.GetClient(indexName);

            try
            {
                var documentIndexResult = await indexClient.Documents.IndexAsync(batch);

                serviceResult.Data = documentIndexResult.Results != null && documentIndexResult.Results.Count == documents.Count();
            }
            catch (IndexBatchException e)
            {
                // Sometimes when your Search service is under load, indexing will fail for some of the documents in
                // the batch. Depending on your application, you can take compensating actions like delaying and
                // retrying. For this simple demo, we just log the failed document keys and continue.
                serviceResult.SetException(e);
                _logger.Log(TraceEventType.Error, e, null, "Failed to index some of the documents: {0}",
                            string.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
            }
            catch (Exception e)
            {
                serviceResult.SetException(e);
                _logger.Log(TraceEventType.Error, e, null, "Search index failed");
            }

            return(serviceResult);
        }
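Because the loop above falls back to crudTypes.First() once the list of types is shorter than the list of documents, passing a single-element type list applies that action to every document. A brief usage sketch follows; the _searchService field and the ProductDocument type are illustrative assumptions.

        // Sketch only: merge two documents into the default index for ProductDocument.
        var documents = new List<ProductDocument>
        {
            new ProductDocument { Id = "1", Name = "First" },
            new ProductDocument { Id = "2", Name = "Second" },
        };

        ServiceResult<bool> result = await _searchService.ChangeDocumentsInIndex(
            documents,
            new[] { AzureSearchIndexType.Merge });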
Example #16
            public Facts(ITestOutputHelper output)
            {
                AuxiliaryFileClient          = new Mock <IAuxiliaryFileClient>();
                DatabaseFetcher              = new Mock <IDatabaseAuxiliaryDataFetcher>();
                DownloadDataClient           = new Mock <IDownloadDataClient>();
                DownloadSetComparer          = new Mock <IDownloadSetComparer>();
                DownloadTransferrer          = new Mock <IDownloadTransferrer>();
                PopularityTransferDataClient = new Mock <IPopularityTransferDataClient>();
                SearchDocumentBuilder        = new Mock <ISearchDocumentBuilder>();
                IndexActionBuilder           = new Mock <ISearchIndexActionBuilder>();
                BatchPusher      = new Mock <IBatchPusher>();
                SystemTime       = new Mock <ISystemTime>();
                FeatureFlags     = new Mock <IFeatureFlagService>();
                Options          = new Mock <IOptionsSnapshot <Auxiliary2AzureSearchConfiguration> >();
                TelemetryService = new Mock <IAzureSearchTelemetryService>();
                Logger           = output.GetLogger <Auxiliary2AzureSearchCommand>();

                Config = new Auxiliary2AzureSearchConfiguration
                {
                    AzureSearchBatchSize            = 10,
                    MaxConcurrentBatches            = 1,
                    MaxConcurrentVersionListWriters = 1,
                    EnablePopularityTransfers       = true,
                    MinPushPeriod = TimeSpan.FromSeconds(5),
                };
                Options.Setup(x => x.Value).Returns(() => Config);

                OldDownloadData   = new DownloadData();
                OldDownloadResult = Data.GetAuxiliaryFileResult(OldDownloadData, "download-data-etag");
                DownloadDataClient
                .Setup(x => x.ReadLatestIndexedAsync(It.IsAny <IAccessCondition>(), It.IsAny <StringCache>()))
                .ReturnsAsync(() => OldDownloadResult);
                NewDownloadData = new DownloadData();
                AuxiliaryFileClient.Setup(x => x.LoadDownloadDataAsync()).ReturnsAsync(() => NewDownloadData);

                Changes = new SortedDictionary <string, long>();
                DownloadSetComparer
                .Setup(x => x.Compare(It.IsAny <DownloadData>(), It.IsAny <DownloadData>()))
                .Returns(() => Changes);

                OldTransfers      = new PopularityTransferData();
                OldTransferResult = new AuxiliaryFileResult <PopularityTransferData>(
                    modified: true,
                    data: OldTransfers,
                    metadata: new AuxiliaryFileMetadata(
                        DateTimeOffset.UtcNow,
                        TimeSpan.Zero,
                        fileSize: 0,
                        etag: "etag"));
                PopularityTransferDataClient
                .Setup(x => x.ReadLatestIndexedAsync(It.IsAny <IAccessCondition>(), It.IsAny <StringCache>()))
                .ReturnsAsync(OldTransferResult);

                NewTransfers = new PopularityTransferData();
                DatabaseFetcher
                .Setup(x => x.GetPopularityTransfersAsync())
                .ReturnsAsync(NewTransfers);

                TransferChanges = new SortedDictionary <string, long>(StringComparer.OrdinalIgnoreCase);
                DownloadTransferrer
                .Setup(x => x.UpdateDownloadTransfers(
                           It.IsAny <DownloadData>(),
                           It.IsAny <SortedDictionary <string, long> >(),
                           It.IsAny <PopularityTransferData>(),
                           It.IsAny <PopularityTransferData>()))
                .Returns(TransferChanges);

                IndexActions = new IndexActions(
                    new List <IndexAction <KeyedDocument> > {
                    IndexAction.Merge(new KeyedDocument())
                },
                    new List <IndexAction <KeyedDocument> >(),
                    new ResultAndAccessCondition <VersionListData>(
                        new VersionListData(new Dictionary <string, VersionPropertiesData>()),
                        Mock.Of <IAccessCondition>()));
                ProcessedIds = new ConcurrentBag <string>();
                IndexActionBuilder
                .Setup(x => x.UpdateAsync(It.IsAny <string>(), It.IsAny <Func <SearchFilters, KeyedDocument> >()))
                .ReturnsAsync(() => IndexActions)
                .Callback <string, Func <SearchFilters, KeyedDocument> >((id, b) =>
                {
                    ProcessedIds.Add(id);
                    b(SearchFilters.IncludePrereleaseAndSemVer2);
                });

                // When pushing, delay for a little bit of time so the stopwatch has some measurable duration.
                PushedIds       = new ConcurrentBag <string>();
                CurrentBatch    = new ConcurrentBag <IndexActions>();
                FinishedBatches = new ConcurrentBag <List <IndexActions> >();
                BatchPusher
                .Setup(x => x.EnqueueIndexActions(It.IsAny <string>(), It.IsAny <IndexActions>()))
                .Callback <string, IndexActions>((id, indexActions) =>
                {
                    CurrentBatch.Add(indexActions);
                    PushedIds.Add(id);
                });
                BatchPusher
                .Setup(x => x.TryFinishAsync())
                .Returns(async() =>
                {
                    await Task.Delay(TimeSpan.FromMilliseconds(1));
                    return(new BatchPusherResult());
                })
                .Callback(() =>
                {
                    FinishedBatches.Add(CurrentBatch.ToList());
                    CurrentBatch = new ConcurrentBag <IndexActions>();
                });

                FeatureFlags.Setup(x => x.IsPopularityTransferEnabled()).Returns(true);

                Target = new UpdateDownloadsCommand(
                    AuxiliaryFileClient.Object,
                    DatabaseFetcher.Object,
                    DownloadDataClient.Object,
                    DownloadSetComparer.Object,
                    DownloadTransferrer.Object,
                    PopularityTransferDataClient.Object,
                    SearchDocumentBuilder.Object,
                    IndexActionBuilder.Object,
                    () => BatchPusher.Object,
                    SystemTime.Object,
                    FeatureFlags.Object,
                    Options.Object,
                    TelemetryService.Object,
                    Logger);
            }