// Verifies that a cached search performed with a valid apikey does NOT forward
// user auth info to the lucene service: the captured lucene request uri must
// carry neither an 'apiuri' nor a 'userid' query parameter.
public void Caching_search_with_apikey_does_not_pass_auth_info_to_lucene() {
    var searchMock = CreateMocks().Item1;
    var responseXml = new XDoc("response");
    var luceneXml = new XDoc("lucene");
    var searchQuery = new SearchQuery("raw", "processed", new LuceneClauseBuilder(), null);
    var searchResult = new SearchResult();

    // NOTE(review): the final bool arg is true here but false in the no-apikey
    // tests — presumably it reflects the apikey'd request; confirm against BuildQuery.
    searchMock.Setup(x => x.BuildQuery("foo", "", SearchQueryParserType.BestGuess, true)).Returns(searchQuery).AtMostOnce().Verifiable();

    // Cache miss forces the service to go to lucene.
    searchMock.Setup(x => x.GetCachedQuery(searchQuery)).Returns((SearchResult)null).AtMostOnce().Verifiable();

    // Capture the exact uri used to call the lucene mock and answer with canned xml.
    XUri luceneUriCalled = null;
    MockPlug.Register(Utils.Settings.LuceneMockUri, (p, v, u, req, res) => {
        luceneUriCalled = u;
        res.Return(DreamMessage.Ok(luceneXml));
    });

    // The lucene payload gets cached and the cached result formatted into the response.
    searchMock.Setup(x => x.CacheQuery(It.Is<XDoc>(v => v == luceneXml), It.IsAny<SearchQuery>(), It.IsAny<TrackingInfo>()))
        .Returns(searchResult);
    searchMock.Setup(x => x.FormatResultSet(
            searchResult,
            It.IsAny<SetDiscriminator>(),
            false,
            It.IsAny<TrackingInfo>(),
            It.IsAny<Result<XDoc>>()
        ))
        .Returns(new Result<XDoc>().WithReturn(responseXml));

    // Act: issue the search with the site apikey attached.
    var response = _search
        .With("q", "foo")
        .With("apikey", Utils.Settings.ApiKey)
        .Get(new Result<DreamMessage>()).Wait();

    // Assert: lucene was called at .../compact with the query but no auth parameters.
    Assert.IsTrue(response.IsSuccessful, response.ToErrorString());
    Assert.IsNotNull(luceneUriCalled, "lucene was not called");
    Assert.AreEqual(Utils.Settings.LuceneMockUri.At("compact"), luceneUriCalled.WithoutQuery(), "lucene was called at wrong uri");
    Assert.IsNull(luceneUriCalled.GetParam("apiuri"), "lucene request contained an apiuri parameter");
    Assert.IsNull(luceneUriCalled.GetParam("userid"), "lucene request contained a userid parameter");
    Assert.AreEqual(searchQuery.LuceneQuery, luceneUriCalled.GetParam("q"), "lucene request had incorrect q parameter");
}
// Verifies the ranked-search happy path on a cache miss: lucene is hit exactly
// once at /compact, the returned documents are cached, and the result set is
// formatted with the request's limit/offset and rank-descending ordering.
public void Ranked_search_hits_lucene_and_caches_result() {
    var searchMock = CreateSearchMock();
    var luceneMock = MockPlug.Setup(Utils.Settings.LuceneMockUri);

    // Request parameters; WithoutMilliseconds drops sub-second precision
    // (presumably so uri round-tripping cannot cause mismatches — TODO confirm).
    var before = DateTime.UtcNow.AddDays(-2).WithoutMilliseconds();
    var since = DateTime.UtcNow.AddDays(-1).WithoutMilliseconds();
    uint limit = 10;
    uint offset = 20;
    var responseXml = new XDoc("response");
    var luceneXml = new XDoc("lucene");
    var searchQuery = new SearchQuery("raw", "processed", new LuceneClauseBuilder(), null);
    var searchResult = new SearchResult();
    searchMock.Setup(x => x.BuildQuery("foo", "", SearchQueryParserType.BestGuess, false)).Returns(searchQuery).AtMostOnce().Verifiable();

    // Cache miss forces the lucene round-trip.
    searchMock.Setup(x => x.GetCachedQuery(searchQuery)).Returns((SearchResult)null).AtMostOnce().Verifiable();

    // Lucene must be called exactly once at /compact with the wiki id and lucene query.
    luceneMock
        .Verb("GET")
        .At("compact")
        .With("wikiid", "default")
        .With("q", searchQuery.LuceneQuery)
        .Returns(luceneXml)
        .ExpectCalls(DreamTimes.Once());

    // The lucene payload must be cached; with no previousqueryid the
    // TrackingInfo reaching CacheQuery has no QueryId yet.
    searchMock.Setup(x => x.CacheQuery(It.Is<XDoc>(v => v == luceneXml), searchQuery, It.Is<TrackingInfo>(t => !t.QueryId.HasValue)))
        .Returns(searchResult)
        .AtMostOnce()
        .Verifiable();

    // Formatting must use the request's paging and sort by rank, descending.
    Predicate<SetDiscriminator> discriminator = s => {
        return s.Limit == limit && s.Offset == offset && s.SortField == "rank" && !s.Ascending;
    };
    searchMock.Setup(x => x.FormatResultSet(
            searchResult,
            It.Is<SetDiscriminator>(s => discriminator(s)),
            false,
            It.Is<TrackingInfo>(t => !t.QueryId.HasValue),
            It.IsAny<Result<XDoc>>()
        ))
        .Returns(new Result<XDoc>().WithReturn(responseXml))
        .AtMostOnce()
        .Verifiable();

    // Act
    var response = _search
        .With("q", "foo")
        .With("since", since)
        .With("before", before)
        .With("limit", limit)
        .With("offset", offset)
        .Get(new Result<DreamMessage>()).Wait();

    // Assert
    Assert.IsTrue(response.IsSuccessful, response.ToErrorString());
    searchMock.VerifyAll();
    luceneMock.Verify();
    Assert.AreEqual(responseXml, response.ToDocument());
}
//--- Methods ---

/// <summary>
/// Logs a search query to the analytics tables and indexes its normalized terms.
/// </summary>
/// <param name="query">The executed search query.</param>
/// <param name="parsedQuery">The parsed (lucene) form of the query.</param>
/// <param name="userId">Id of the user who issued the query.</param>
/// <param name="resultCount">Number of results the query produced.</param>
/// <param name="previousQueryId">Id of the query this one refines, or null for a fresh query.</param>
/// <returns>The auto-increment id of the new query_log row.</returns>
public ulong SearchAnalytics_LogQuery(SearchQuery query, string parsedQuery, uint userId, uint resultCount, ulong? previousQueryId) {
    var sorted = query.GetOrderedNormalizedTermString();
    var hash = query.GetOrderedTermsHash();

    // Insert the log row and capture its id in a single statement batch.
    var queryId = Catalog.NewQuery(@"/* SearchAnalytics_LogQuery */
INSERT INTO query_log (raw, sorted_terms, sorted_terms_hash, parsed, created, user_id, ref_query_id, result_count)
VALUES (?QUERY, ?SORTED, ?HASH, ?PARSED, ?CREATED, ?USERID, ?REFID, ?RESULTCOUNT);
SELECT LAST_INSERT_ID();")
        .With("QUERY", query.Raw)
        .With("SORTED", sorted)
        .With("HASH", hash)
        .With("PARSED", parsedQuery)
        .With("USERID", userId)
        .With("CREATED", DateTime.UtcNow)
        .With("REFID", previousQueryId)
        .With("RESULTCOUNT", resultCount)
        .ReadAsULong().Value;
    var terms = query.GetNormalizedTerms();
    if(terms.Any()) {

        // Terms are escaped and inlined (rather than parameterized) because the
        // multi-row VALUES/IN lists are variable-length; MakeSqlSafe guards injection.
        var quotedTerms = terms.Select(x => "'" + DataCommand.MakeSqlSafe(x) + "'").ToArray();

        // Ensure every term exists in query_terms; INSERT IGNORE dedupes.
        Catalog.NewQuery(string.Format(@"/* SearchAnalytics_LogQuery */
INSERT IGNORE INTO query_terms (query_term) values {0};", DbUtils.ConvertArrayToDelimittedString(',', quotedTerms.Select(x => "(" + x + ")")))).Execute();

        // Fetch the ids of all of this query's terms...
        var termIds = new List<uint>();
        Catalog.NewQuery(string.Format(@"/* SearchAnalytics_LogQuery */
SELECT query_term_id from query_terms where query_term IN({0})", DbUtils.ConvertArrayToDelimittedString(',', quotedTerms))).Execute(r => {
            while(r.Read()) {
                termIds.Add(r.Read<uint>(0));
            }
        });

        // ...and map each term to the logged query.
        Catalog.NewQuery(string.Format(@"/* SearchAnalytics_LogQuery */
INSERT IGNORE INTO query_term_map (query_term_id,query_id) values {0};", DbUtils.ConvertArrayToDelimittedString(',', termIds.Select(x => "(" + x + "," + queryId + ")")))).Execute();
    }
    return queryId;
}
// Verifies that CacheQuery logs the query to search analytics when tracking
// info with a previous query id is supplied, and that the new query id (456)
// is written back onto the TrackingInfo.
public void CacheQuery_can_track_query() {

    // Arrange
    InitWithoutAdaptiveSearch();
    var searchDoc = XDocFactory.From(@"
<documents>
    <parsedQuery>content:foo</parsedQuery>
    <document>
        <id.page>123</id.page>
        <id.file>1234</id.file>
        <title>file</title>
        <date.edited>20100525231800</date.edited>
        <score>1</score>
    </document>
</documents>", MimeType.TEXT_XML);
    var trackingInfo = new TrackingInfo() { PreviousQueryId = 123 };
    var q = new SearchQuery("raw", "processed", new LuceneClauseBuilder(), null);
    _cache.Setup(x => x.Set(GetKey(q), It.IsAny<SearchResult>(), It.IsAny<TimeSpan>())).AtMostOnce().Verifiable();

    // Expect one analytics log call: 1 result, chained to query 123, yielding id 456.
    _session.Setup(x => x.SearchAnalytics_LogQuery(q, "content:foo", _user.ID, 1, 123)).Returns(456).AtMostOnce().Verifiable();

    // Act
    Search.CacheQuery(searchDoc, q, trackingInfo);

    // Assert: adaptive search was disabled, so no popularity lookup may happen.
    _session.Verify(x => x.SearchAnalytics_GetPopularityRanking(It.IsAny<string>()), Times.Never());
    _session.VerifyAll();
    _cache.VerifyAll();
    Assert.AreEqual(456, trackingInfo.QueryId);
}
// Verifies that CacheQuery converts the lucene xml into typed SearchResult
// items (file/page/comment/user), drops documents without a recognizable id,
// records the parsed query, and pushes the set into the cache.
public void CacheQuery_converts_xml_to_search_results_and_caches_the_data() {

    // Arrange: five documents, the last of which has no id.* element and must be dropped.
    InitWithoutAdaptiveSearch();
    var searchDoc = XDocFactory.From(@"
<documents>
    <parsedQuery>content:foo</parsedQuery>
    <document>
        <id.page>123</id.page>
        <id.file>1234</id.file>
        <title>file</title>
        <date.edited>20100525231800</date.edited>
        <score>1</score>
    </document>
    <document>
        <id.page>456</id.page>
        <title>page</title>
        <date.edited>20100429160114</date.edited>
        <rating.count>0</rating.count>
        <score>0.75</score>
    </document>
    <document>
        <id.page>36932</id.page>
        <id.comment>789</id.comment>
        <title>comment</title>
        <date.edited>20100429160323</date.edited>
        <rating.count>0</rating.count>
        <score>0.5</score>
    </document>
    <document>
        <id.page>36932</id.page>
        <id.user>432</id.user>
        <title>user</title>
        <date.edited>20100429160323</date.edited>
        <rating.count>0</rating.count>
        <score>0.5</score>
    </document>
    <document>
        <title>dropped item</title>
        <date.edited>20100429160323</date.edited>
        <rating.count>0</rating.count>
        <score>0.5</score>
    </document>
</documents>", MimeType.TEXT_XML);
    var q = new SearchQuery("raw", "processed", new LuceneClauseBuilder(), null);
    _cache.Setup(x => x.Set(GetKey(q), It.IsAny<SearchResult>(), It.IsAny<TimeSpan>())).AtMostOnce().Verifiable();

    // Act
    var result = Search.CacheQuery(searchDoc, q, null);

    // Assert: no popularity lookup (adaptive search disabled), 4 typed results survive.
    _session.Verify(x => x.SearchAnalytics_GetPopularityRanking(It.IsAny<string>()), Times.Never());
    _cache.VerifyAll();
    Assert.AreEqual(4, result.Count);
    Assert.AreEqual("content:foo", result.ExecutedQuery);
    Assert.IsTrue(result.Where(x => x.Type == SearchResultType.File && x.TypeId == 1234).Any());
    Assert.IsTrue(result.Where(x => x.Type == SearchResultType.Page && x.TypeId == 456).Any());
    Assert.IsTrue(result.Where(x => x.Type == SearchResultType.Comment && x.TypeId == 789).Any());
    Assert.IsTrue(result.Where(x => x.Type == SearchResultType.User && x.TypeId == 432).Any());
}
// Verifies that GetCachedQuery returns the cached SearchResult instance when
// the cache lookup succeeds.
public void Cache_hit_returns_SearchResult() {

    // Arrange
    var q = new SearchQuery("raw", "processed", new LuceneClauseBuilder(), null);
    var result = new SearchResult();

    // FIX: a cache *hit* means TryGet succeeds, so the mock must return true.
    // The previous setup returned false (a miss), contradicting the test name
    // and only passing if the implementation ignored TryGet's return value.
    _cache.Setup(x => x.TryGet(GetKey(q), out result)).Returns(true).AtMostOnce().Verifiable();

    // Act/Assert: the exact cached instance is handed back.
    Assert.AreSame(result, Search.GetCachedQuery(q));
    _cache.VerifyAll();
}
// Builds the per-user cache key under which a query's results are stored.
private string GetKey(SearchQuery query) {
    return string.Format("query:{0}:{1}", _user.ID, query.LuceneQuery);
}
// Pass-through: delegates query caching to the wrapped search implementation.
public SearchResult CacheQuery(XDoc searchDoc, SearchQuery query, TrackingInfo trackingInfo) {
    var cachedResult = _instance.CacheQuery(searchDoc, query, trackingInfo);
    return cachedResult;
}
// Verifies that a 'previousqueryid' request parameter is threaded through to
// CacheQuery via TrackingInfo.PreviousQueryId on the ranked search path.
public void Ranked_search_tracks_previous_queryid() {
    var searchMock = CreateMocks().Item1;
    var luceneMock = MockPlug.Setup(Utils.Settings.LuceneMockUri);
    var before = DateTime.UtcNow.AddDays(-2).WithoutMilliseconds();
    var since = DateTime.UtcNow.AddDays(-1).WithoutMilliseconds();
    uint limit = 10;
    uint offset = 20;
    ulong previousqueryid = 43;
    var responseXml = new XDoc("response");
    var luceneXml = new XDoc("lucene");
    var searchQuery = new SearchQuery("raw", "processed", new LuceneClauseBuilder(), null);
    var searchResult = new SearchResult();
    searchMock.Setup(x => x.BuildQuery("foo", "", SearchQueryParserType.BestGuess, false)).Returns(searchQuery).AtMostOnce().Verifiable();

    // Cache miss forces the lucene round-trip.
    searchMock.Setup(x => x.GetCachedQuery(searchQuery)).Returns((SearchResult)null).AtMostOnce().Verifiable();
    luceneMock
        .Verb("GET")
        .At("compact")
        .With("wikiid", "default")
        .With("q", searchQuery.LuceneQuery)
        .Returns(luceneXml)
        .ExpectCalls(DreamTimes.Once());

    // Key expectation: the TrackingInfo handed to CacheQuery carries the previous query id.
    searchMock.Setup(x => x.CacheQuery(It.IsAny<XDoc>(), searchQuery, It.Is<TrackingInfo>(t => t.PreviousQueryId == previousqueryid)))
        .Returns(searchResult)
        .AtMostOnce()
        .Verifiable();
    searchMock.Setup(x => x.FormatResultSet(
            searchResult,
            It.IsAny<SetDiscriminator>(),
            false,
            It.IsAny<TrackingInfo>(),
            It.IsAny<Result<XDoc>>())
        )
        .Returns(new Result<XDoc>().WithReturn(responseXml))
        .AtMostOnce()
        .Verifiable();

    // Act
    var response = _search
        .With("q", "foo")
        .With("previousqueryid", previousqueryid.ToString())
        .With("since", since)
        .With("before", before)
        .With("limit", limit)
        .With("offset", offset)
        .Get(new Result<DreamMessage>()).Wait();

    // Assert
    Assert.IsTrue(response.IsSuccessful, response.ToErrorString());
    searchMock.VerifyAll();
    luceneMock.Verify();
    Assert.AreEqual(responseXml, response.ToDocument());
}
// Coroutine for the cached search path: serve the cached result set when
// allowed, otherwise query the lucene index at /compact, cache the filtered
// documents, and format the resulting set.
private Yield CachedSearch(ISearchBL search, SearchQuery query, SetDiscriminator discriminator, bool explain, TrackingInfo trackingInfo, Result<XDoc> result) {

    // An empty query short-circuits to an empty, untracked result set.
    if(query.IsEmpty) {
        yield return search.FormatResultSet(new SearchResult(), discriminator, false, null, new Result<XDoc>())
            .Set(result.Return);
        yield break;
    }
    var searchResultSet = search.GetCachedQuery(query);
    if(!explain && searchResultSet != null && (trackingInfo == null || trackingInfo.QueryId.HasValue)) {

        // we can only use the cached result set if we either don't have trackingInfo or the trackInfo has a queryId. Otherwise we have to re-query.
        yield return search.FormatResultSet(searchResultSet, discriminator, false, trackingInfo, new Result<XDoc>()).Set(result.Return);
        yield break;
    }

    // get search results
    DreamMessage res = null;
    var lucene = DekiContext.Current.Deki.LuceneIndex.At("compact").With("wikiid", DekiContext.Current.Instance.Id);
    yield return lucene.With("q", query.LuceneQuery).GetAsync().Set(x => res = x);
    if(!res.IsSuccessful) {

        // Report the failure as an error attribute on an empty document set rather than throwing.
        result.Return(new XDoc("documents").Attr("error", string.Format(DekiResources.ERROR_QUERYING_SEARCH_INDEX, query.Raw.EncodeHtmlEntities())));
        yield break;
    }

    // Cache the permission-filtered documents, then format for the caller.
    searchResultSet = search.CacheQuery(FilterResults(res.ToDocument()), query, trackingInfo);
    yield return search.FormatResultSet(searchResultSet, discriminator, explain, trackingInfo, new Result<XDoc>())
        .Set(result.Return);
}
// Coroutine for the uncached ("nocache") search path: query lucene directly
// with paging/sort parameters and return the filtered documents without caching.
private Yield UnCachedSearch(SearchQuery query, SetDiscriminator discriminator, Result<XDoc> result) {

    // An empty query yields an empty document set immediately.
    if(query.IsEmpty) {
        result.Return(new XDoc("documents"));
        yield break;
    }

    // get search results
    Result<DreamMessage> res;
    yield return res = DekiContext.Current.Deki.LuceneIndex
        .With("wikiid", DekiContext.Current.Instance.Id)
        .With("q", query.LuceneQuery)
        .With("max", discriminator.Limit)
        .With("offset", discriminator.Offset)
        .With("sortBy", discriminator.SortBy ?? "").GetAsync();
    if(!res.Value.IsSuccessful) {

        // Report the failure as an error attribute on an empty document set rather than throwing.
        result.Return(new XDoc("documents").Attr("error", string.Format(DekiResources.ERROR_QUERYING_SEARCH_INDEX, query.Raw.EncodeHtmlEntities())));
        yield break;
    }

    // Apply permission filtering before handing the documents back.
    result.Return(FilterResults(res.Value.ToDocument()));
}
//public static DekiResource ERROR_PARSING_SEARCH_QUERY() { return new DekiResource("System.API.Error.error_parsing_search_query"); }

// Resource for a failed lucene index query; the raw query text is
// html-encoded so it can be embedded safely in the localized message.
public static DekiResource ERROR_QUERYING_SEARCH_INDEX(SearchQuery query) {
    var encodedQuery = query.Raw.EncodeHtmlEntities();
    return new DekiResource("System.API.Error.error_querying_search_index", encodedQuery);
}
// Coroutine for the cached search path (apikey-aware variant): serve the
// cached result set when allowed, otherwise query lucene at /compact. When the
// request did NOT carry a valid apikey, the current user's id and the public
// api uri are forwarded so the index can do permission filtering itself.
private Yield CachedSearch(ISearchBL search, SearchQuery query, SetDiscriminator discriminator, bool explain, TrackingInfo trackingInfo, Result<XDoc> result) {

    // An empty query short-circuits to an empty, untracked result set.
    if(query.IsEmpty) {
        yield return search.FormatResultSet(new SearchResult(), discriminator, false, null, new Result<XDoc>())
            .Set(result.Return);
        yield break;
    }
    var searchResultSet = search.GetCachedQuery(query);
    if(!explain && searchResultSet != null && (trackingInfo == null || trackingInfo.QueryId.HasValue)) {

        // we can only use the cached result set if we either don't have trackingInfo or the trackInfo has a queryId. Otherwise we have to re-query.
        yield return search.FormatResultSet(searchResultSet, discriminator, false, trackingInfo, new Result<XDoc>()).Set(result.Return);
        yield break;
    }

    // get search results
    DreamMessage msg = null;
    var context = DekiContext.Current;
    var searchPlug = context.Deki.LuceneIndex
        .At("compact")
        .With("q", query.LuceneQuery)
        .With("wikiid", context.Instance.Id);
    if(!DekiContext.Current.IsValidApiKeyInRequest) {

        // No apikey: pass auth info so lucene can filter by the user's permissions.
        searchPlug = searchPlug
            .With("userid", context.User.ID)
            .With("apiuri", Self.Uri.AsPublicUri().ToString());
    }
    yield return searchPlug
        .Get(new Result<DreamMessage>())
        .Set(x => msg = x);
    if(!msg.IsSuccessful) {

        // Report the failure as a localized error attribute rather than throwing.
        var resources = context.Resources;
        result.Return(new XDoc("documents").Attr("error", resources.Localize(DekiResources.ERROR_QUERYING_SEARCH_INDEX(query))));
        yield break;
    }

    // Cache the documents (already filtered upstream), then format for the caller.
    searchResultSet = search.CacheQuery(msg.ToDocument(), query, trackingInfo);
    yield return search.FormatResultSet(searchResultSet, discriminator, explain, trackingInfo, new Result<XDoc>())
        .Set(result.Return);
}
// Coroutine for the uncached ("nocache") search path (apikey-aware variant):
// query lucene directly with paging/sort parameters. When the request did NOT
// carry a valid apikey, user id and public api uri are forwarded so the index
// can do permission filtering itself; the response is returned unmodified.
private Yield UnCachedSearch(SearchQuery query, SetDiscriminator discriminator, Result<XDoc> result) {

    // An empty query yields an empty document set immediately.
    if(query.IsEmpty) {
        result.Return(new XDoc("documents"));
        yield break;
    }

    // get search results
    DreamMessage msg = null;
    var context = DekiContext.Current;
    var searchPlug = context.Deki.LuceneIndex
        .With("q", query.LuceneQuery)
        .With("max", discriminator.Limit)
        .With("offset", discriminator.Offset)
        .With("sortBy", discriminator.SortBy ?? "")
        .With("wikiid", context.Instance.Id);
    if(!DekiContext.Current.IsValidApiKeyInRequest) {

        // No apikey: pass auth info so lucene can filter by the user's permissions.
        searchPlug = searchPlug
            .With("userid", context.User.ID)
            .With("apiuri", Self.Uri.AsPublicUri().ToString());
    }
    yield return searchPlug
        .Get(new Result<DreamMessage>())
        .Set(x => msg = x);
    if(!msg.IsSuccessful) {

        // Report the failure as a localized error attribute rather than throwing.
        var resources = context.Resources;
        result.Return(new XDoc("documents").Attr("error", resources.Localize(DekiResources.ERROR_QUERYING_SEARCH_INDEX(query))));
        yield break;
    }
    result.Return(msg.ToDocument());
}
// With adaptive search enabled, caching a query must trigger a popularity
// ranking lookup keyed by the query's ordered terms hash.
public void CacheQuery_with_adaptive_search_fetches_popularity_data() {

    // Arrange
    var query = new SearchQuery("raw", "processed", new LuceneClauseBuilder(), null);
    _cache.Setup(x => x.Set(GetKey(query), It.IsAny<SearchResult>(), It.IsAny<TimeSpan>()))
        .AtMostOnce()
        .Verifiable();
    _session.Setup(x => x.SearchAnalytics_GetPopularityRanking(query.GetOrderedTermsHash()))
        .Returns((IEnumerable<ResultPopularityBE>)null)
        .AtMostOnce()
        .Verifiable();

    // Act
    Search.CacheQuery(new XDoc("foo"), query, null);

    // Assert
    _session.VerifyAll();
    _cache.VerifyAll();
}
// Verifies the non-caching ("nocache") search path without an apikey: the
// service must forward auth info ('apiuri' and 'userid' parameters) to lucene
// and call the mock's root uri (no /compact segment on this path).
public void NonCaching_search_without_apikey_passes_auth_info_to_lucene() {
    var search = _deki.AtLocalHost.At("site", "search");
    var searchMock = CreateMocks().Item1;
    var luceneXml = new XDoc("lucene");
    var searchQuery = new SearchQuery("raw", "processed", new LuceneClauseBuilder(), null);
    searchMock.Setup(x => x.BuildQuery("foo", "", SearchQueryParserType.BestGuess, false)).Returns(searchQuery).AtMostOnce().Verifiable();

    // Capture the exact uri the service uses to call the lucene mock.
    XUri luceneUriCalled = null;
    MockPlug.Register(Utils.Settings.LuceneMockUri, (p, v, u, req, res) => {
        luceneUriCalled = u;
        res.Return(DreamMessage.Ok(luceneXml));
    });

    // Act: no apikey on the request, caching disabled.
    var response = search
        .With("q", "foo")
        .With("nocache", true)
        .Get(new Result<DreamMessage>()).Wait();

    // Assert: lucene was called at the root uri WITH auth parameters.
    Assert.IsTrue(response.IsSuccessful, response.ToErrorString());
    Assert.IsNotNull(luceneUriCalled, "lucene was not called");
    Assert.AreEqual(Utils.Settings.LuceneMockUri, luceneUriCalled.WithoutQuery(), "lucene was called at wrong uri");
    Assert.IsNotNull(luceneUriCalled.GetParam("apiuri"), "lucene request did not contain an apiuri parameter");
    Assert.IsNotNull(luceneUriCalled.GetParam("userid"), "lucene request did not contain a userid parameter");
    Assert.AreEqual(searchQuery.LuceneQuery, luceneUriCalled.GetParam("q"), "lucene request had incorrect q parameter");
}
// A non-empty constraint clause must be AND-combined with the processed query text.
public void Constraints_are_ANDed_to_Query() {

    // Arrange
    var clauseBuilder = new LuceneClauseBuilder();
    clauseBuilder.And("constraint");
    var query = new SearchQuery("raw", "cooked", clauseBuilder, null);

    // Act/Assert: processed text is required (+) and the constraint is appended.
    var expected = string.Format("+(cooked) {0}", clauseBuilder.Clause);
    Assert.AreEqual(expected, query.LuceneQuery);
}
// Pass-through: delegates the cache lookup to the wrapped search implementation.
public SearchResult GetCachedQuery(SearchQuery query) {
    var cachedResult = _instance.GetCachedQuery(query);
    return cachedResult;
}
// With an empty constraint builder the lucene query is just the processed text.
public void Null_constraint_is_not_appended_to_query() {

    // Arrange
    var emptyConstraint = new LuceneClauseBuilder();
    var query = new SearchQuery("raw", "cooked", emptyConstraint, null);

    // Act/Assert
    Assert.AreEqual("cooked", query.LuceneQuery);
}
// Smoke test over all DekiResources factory methods: invokes each public
// static method returning DekiResource with synthesized arguments, checks that
// a localization string exists for the produced key, and validates that the
// format string's {n} placeholders are consistent with the supplied arguments.
public void Verify_that_default_resource_strings_have_correct_argument_count() {
    var resourceManager = new PlainTextResourceManager(Utils.Settings.DekiResourcesPath);

    // All public static factories that produce a DekiResource.
    var resourceMethods = from method in typeof(DekiResources).GetMethods(BindingFlags.Static | BindingFlags.Public)
                          where method.ReturnType == typeof(DekiResource)
                          select method;
    foreach(var method in resourceMethods) {

        // Build a dummy argument list matching the method's signature.
        var parameters = method.GetParameters();
        var args = new object[parameters.Length];
        for(var i = 0; i < parameters.Length; i++) {
            var type = parameters[i].ParameterType;
            if(type == typeof(string)) {
                args[i] = string.Empty;
            } else if(type.IsA<DekiResource>()) {

                // can't test resources that take other resources via this method
            } else if(type.IsA<MimeType>()) {
                args[i] = MimeType.TEXT_XML;
            } else if(type.IsA<SearchQuery>()) {
                args[i] = new SearchQuery("foo", "bar", new LuceneClauseBuilder(), null);
            } else if(type.IsA<XUri>()) {
                args[i] = new XUri("http://foo");
            } else {

                // Fall back to a default-constructed instance; fail loudly if impossible.
                try {
                    args[i] = Activator.CreateInstance(parameters[i].ParameterType, false);
                } catch(Exception) {
                    Assert.Fail(string.Format("{0}: cannot create argument instance of type '{1}'", method.Name, parameters[i].ParameterType));
                }
            }
        }
        var resource = (DekiResource)method.Invoke(null, args);
        var format = resourceManager.GetString(resource.LocalizationKey, CultureInfo.InvariantCulture, null);
        Assert.IsNotNull(format, string.Format("{0}: No localization string exists for key '{1}'", method.Name, resource.LocalizationKey));

        // Collect the distinct {n} placeholder indices used in the format string.
        var paramSet = new HashSet<int>();
        var matches = _paramsRegex.Matches(format);
        for(int i = 0; i < matches.Count; i++) {
            paramSet.Add(Convert.ToInt32(matches[i].Groups[1].Value));
        }

        // The resource must supply at least as many args as the format consumes.
        Assert.IsTrue(resource.Args.Length >= paramSet.Count, string.Format("{0}: too many parameters in string '{1}' ({2} < {3})", method.Name, format, resource.Args.Length, paramSet.Count));
        if(paramSet.Count == 0) {
            continue;
        }

        // Placeholder indices must be contiguous: the highest index equals count - 1.
        // NOTE(review): the literal below was split by a stray newline in the original
        // extraction; rejoined here as a single valid string literal.
        Assert.AreEqual(paramSet.Count - 1, paramSet.OrderBy(x => x).Last(), string.Format("{0}: incorrect last parameter index '{1}'", method.Name, format));
    }
}
// Pass-through: forwards the analytics logging call to the next session layer.
public ulong SearchAnalytics_LogQuery(SearchQuery query, string parsedQuery, uint userId, uint resultCount, ulong? previousQueryId) {
    var loggedQueryId = _next.SearchAnalytics_LogQuery(query, parsedQuery, userId, resultCount, previousQueryId);
    return loggedQueryId;
}