//--- Constructors ---
public SearchResultItem(uint typeId, SearchResultType type, string title, double rank, DateTime modified) {
    TypeId = typeId;
    Type = type;
    Title = title;
    Modified = modified;
    Detail = null;
    _rank = rank;
}
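// A hypothetical sketch of the surrounding class shape, inferred only from the
// constructor body above and the members used in the snippets below (Detail,
// Type, DetailKey); the real class may declare these members differently.
public class SearchResultItem {
    private readonly double _rank;

    public uint TypeId { get; private set; }
    public SearchResultType Type { get; private set; }
    public string Title { get; private set; }
    public DateTime Modified { get; private set; }
    public SearchResultDetail Detail { get; set; }

    // assumed composite cache key, e.g. "Page:123"; the actual format is not shown here
    public string DetailKey { get { return Type + ":" + TypeId; } }
}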
public void Can_roundtrip_SearchResultDetail_with_null_string() {
    var data = new SearchResultDetail();
    data["null"] = null;
    using(var ms = new MemoryStream()) {
        _serializer.Serialize(ms, data);
        ms.Position = 0;
        var data2 = _serializer.Deserialize<SearchResultDetail>(ms);
        Assert.IsNull(data2["null"]);
    }
}
public void Can_roundtrip_SearchResultDetail_with_empty_string2() {
    var data = new SearchResultDetail();
    data["empty"] = "";
    using(var ms = new MemoryStream()) {
        _serializer.Serialize(ms, data);
        ms.Position = 0;
        var data2 = _serializer.Deserialize<SearchResultDetail>(ms);
        Assert.AreEqual("", data2["empty"]);
    }
}
public void Can_roundtrip_SearchResultDetail() {
    var data = new SearchResultDetail();
    data["foo"] = "bar";
    data["bzz"] = "bonk";
    using(var ms = new MemoryStream()) {
        _serializer.Serialize(ms, data);
        ms.Position = 0;
        var data2 = _serializer.Deserialize<SearchResultDetail>(ms);
        Assert.AreEqual(data["foo"], data2["foo"]);
        Assert.AreEqual(data["bzz"], data2["bzz"]);
    }
}
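// The roundtrip tests above all use a _serializer fixture field that is not part
// of this excerpt. A minimal sketch of the shape those calls imply, with a
// hypothetical BinaryFormatter-backed stand-in; the concrete serializer in the
// real fixture is an assumption, and SearchResultDetail would need to be marked
// [Serializable] for this particular stand-in to work.
using System.IO;
using System.Runtime.Serialization.Formatters.Binary;

public interface ISerializer {
    void Serialize<T>(Stream stream, T value);
    T Deserialize<T>(Stream stream);
}

public class BinarySerializer : ISerializer {
    private readonly BinaryFormatter _formatter = new BinaryFormatter();

    public void Serialize<T>(Stream stream, T value) {
        _formatter.Serialize(stream, value);
    }

    public T Deserialize<T>(Stream stream) {
        return (T)_formatter.Deserialize(stream);
    }
}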
public void FormatResultSet_generates_special_output_for_tracked_searches() {

    // Arrange
    var item = new SearchResultItem(1, SearchResultType.Page, "page", 1, DateTime.Parse("2010/10/10").ToSafeUniversalTime());
    var detail = new SearchResultDetail(Pairs(
        "id.page", 1,
        "path", "path",
        "uri", "http://uri",
        "title", "foo",
        "author", "bob",
        "preview", "preview"
    ));
    var set = new SearchResult("parsed", new[] { item });
    var discriminator = new SetDiscriminator() {
        Ascending = true,
        Limit = 100,
        Offset = 0,
        SortField = "rank"
    };
    var explain = false;
    var trackingInfo = new TrackingInfo() { QueryId = 123, PreviousQueryId = 456 };
    _cache.Setup(x => x.TryGet(item.DetailKey, out detail)).Returns(true).AtMostOnce().Verifiable();

    // Act
    var xml = Search.FormatResultSet(set, discriminator, explain, trackingInfo, new Result<XDoc>()).Wait();

    // Assert
    _cache.VerifyAll();
    var expectedXml = XDocFactory.From(@"
<search querycount=""1"" ranking=""adaptive"" queryid=""123"" count=""1"">
    <parsedQuery>parsed</parsedQuery>
    <result>
        <id>1</id>
        <uri>http://uri</uri>
        <uri.track>mock://api/site/query/123?pageid=1&amp;rank=1&amp;type=page&amp;position=1</uri.track>
        <rank>1</rank>
        <title>foo</title>
        <page>
            <title>foo</title>
            <path>path</path>
        </page>
        <author>bob</author>
        <date.modified>2010-10-10T00:00:00Z</date.modified>
        <content>preview</content>
        <type>page</type>
    </result>
</search>", MimeType.TEXT_XML);
    Assert.AreEqual(expectedXml.ToCompactString(), xml.ToCompactString());
}
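// The test above uses a Pairs(...) helper that is not part of this excerpt. A
// minimal sketch of what its call site implies: alternating key/value arguments
// zipped into key/value pairs. The return type and null handling are assumptions.
private static KeyValuePair<string, string>[] Pairs(params object[] args) {
    var pairs = new KeyValuePair<string, string>[args.Length / 2];
    for(var i = 0; i < pairs.Length; i++) {

        // convert each value to its string form, preserving nulls
        var value = args[2 * i + 1];
        pairs[i] = new KeyValuePair<string, string>((string)args[2 * i], value == null ? null : value.ToString());
    }
    return pairs;
}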
/// <summary>
/// Search the OCR text captured for a user session for the given search text.
/// </summary>
/// <param name="userSessionId">Identifier of the user session to search.</param>
/// <param name="searchText">Text to search for (case-insensitive).</param>
/// <returns>A SearchResultsDto describing every matching word.</returns>
/// <remarks>
/// I should update the parameter of GetSessionDetailByUserSessionId to be a string and decouple the dependency on the MongoDb ObjectId model.
/// I should also create my own "OcrResult" to decouple the dependency on the Microsoft Azure models.
/// </remarks>
public async Task<SearchResultsDto> SearchUserSession(string userSessionId, string searchText) {
    var result = new SearchResultsDto();
    var sessionDetailsFound = await _sessionDetailRepo.GetSessionDetailByUserSessionId(ObjectId.Parse(userSessionId)).ConfigureAwait(false);
    var detail = sessionDetailsFound.PrintedTextResult;
    var regions = detail.Regions;
    foreach (OcrRegion region in regions) {
        var lines = region.Lines;
        foreach (OcrLine line in lines) {
            var words = line.Words;
            foreach (OcrWord word in words) {

                // search for a match (case-insensitive)
                if (word.Text.ToLower().Contains(searchText.ToLower())) {

                    // find how many regions to exclude
                    var regionFoundIndex = regions.IndexOf(region);
                    var skipLastCount = regions.Count - regionFoundIndex;
                    if (skipLastCount < 0) {
                        skipLastCount = 0;
                    }

                    // find how many lines of text precede the word
                    var reg = regions.SkipLast(skipLastCount);
                    var linesInPriorRegions = 0;
                    if (reg.Any()) {
                        linesInPriorRegions = reg.Sum(x => x.Lines.Count());
                    }
                    var lineNumber = linesInPriorRegions + region.Lines.IndexOf(line) + 1;

                    // find how many words to exclude
                    var wordsFoundIndex = words.IndexOf(word);
                    var skipLastWordCount = words.Count - wordsFoundIndex;
                    if (skipLastWordCount < 0) {
                        skipLastWordCount = 0;
                    }

                    // find how many characters precede the word in the line:
                    // the characters of prior words plus one space per prior word;
                    // special characters are not included
                    var filteredWords = words.SkipLast(skipLastWordCount);
                    var charInPriorWords = 0;
                    if (filteredWords.Any()) {
                        charInPriorWords = filteredWords.Sum(w => w.Text.Length);
                    }
                    var intPosition = charInPriorWords + filteredWords.Count();

                    // map from the Ocr classes to custom classes (to reduce the dependency on the Ocr classes)
                    var resultText = new SearchResultText() {
                        BoundingBox = word.BoundingBox,
                        Text = word.Text
                    };
                    var resultLine = new SearchResultLine() {
                        BoundingBox = line.BoundingBox,
                        Text = words.Select(w => new SearchResultText() {
                            BoundingBox = w.BoundingBox,
                            Text = w.Text
                        }).ToList()
                    };
                    var resultRegion = new SearchResultRegion() {
                        BoundingBox = region.BoundingBox,
                        Lines = new List<SearchResultLine>() { resultLine }
                    };
                    var resultDetail = new SearchResultDetail() {
                        Language = detail.Language,
                        TextAngle = detail.TextAngle,
                        Orientation = detail.Orientation,
                        Regions = new List<SearchResultRegion>() { resultRegion }
                    };

                    // build result details
                    var resultDetails = new SearchResultDetails(resultText, resultLine, resultRegion, resultDetail, lineNumber, intPosition);

                    // add to results
                    result.ResultDetails.Add(resultDetails);
                }
            }
        }
    }
    return result;
}
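// Hypothetical shapes for the decoupled DTOs used in the mapping above; the
// real classes are not part of this excerpt. BoundingBox is assumed to be the
// "x,y,width,height" string the Azure OCR models use, and the real classes may
// carry additional members.
public class SearchResultText {
    public string BoundingBox { get; set; }
    public string Text { get; set; }
}

public class SearchResultLine {
    public string BoundingBox { get; set; }
    public List<SearchResultText> Text { get; set; }
}

public class SearchResultRegion {
    public string BoundingBox { get; set; }
    public List<SearchResultLine> Lines { get; set; }
}

public class SearchResultDetail {
    public string Language { get; set; }
    public double? TextAngle { get; set; }
    public string Orientation { get; set; }
    public List<SearchResultRegion> Regions { get; set; }
}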
/// <summary>
/// Retrieve the search results from the database
/// </summary>
/// <param name="searchterm">Term to match against the search key.</param>
/// <param name="searchclass">Class of location to search within.</param>
/// <param name="searchlimit">Maximum number of rows to return.</param>
/// <returns>A sorted list of matching results, or a single blank result if nothing matched.</returns>
private List<SearchResult> getSearchResults(string searchterm, string searchclass, int searchlimit) {

    // Prepare a list of results to return
    List<SearchResult> results = new List<SearchResult>();

    // Catch a null search term early
    if (searchterm == null)
        return results;

    // Get the connection info
    string connectionInfo = ConfigurationManager.ConnectionStrings["ViewerSettings"].ConnectionString;

    // Create the connection and the command object; using blocks ensure
    // both are closed and disposed even if the query throws
    using (SqlConnection conn = new SqlConnection(connectionInfo))
    using (SqlCommand cmd = new SqlCommand("pLocationSearch_NZTM", conn)) {
        cmd.CommandType = CommandType.StoredProcedure;
        cmd.Parameters.Add("@searchlimit", SqlDbType.Int).Value = searchlimit;
        cmd.Parameters.Add("@searchterm", SqlDbType.VarChar).Value = searchterm;
        cmd.Parameters.Add("@searchclass", SqlDbType.VarChar).Value = searchclass;

        // Open the connection to the database
        conn.Open();

        // Execute the reader and prepare the results
        using (SqlDataReader myReader = cmd.ExecuteReader()) {
            while (myReader.Read()) {

                // Create a details object
                SearchResultDetail detail = new SearchResultDetail() {
                    searchkey = myReader.GetString(0),
                    searchclass = myReader.GetString(1),
                    keydescription = myReader.GetString(5),
                    x = myReader.GetDouble(6),
                    y = myReader.GetDouble(7),
                    outSR = myReader.GetInt32(8)
                };

                // Create a result
                SearchResult myresult = new SearchResult() {
                    label = myReader.GetString(4),
                    value = detail
                };

                // Add the result to the results list
                results.Add(myresult);
            }
        }
    }

    // Sort the results
    results = results.OrderBy(o => o.label).ToList();

    // Check for no results - add a blank result
    if (results.Count == 0) {

        // Create a blank details object
        SearchResultDetail detail = new SearchResultDetail() {
            searchkey = "No Matching Results",
            searchclass = "",
            keydescription = "No Matching Results",
            x = 0,
            y = 0,
            outSR = 0
        };
        SearchResult myresult = new SearchResult() {
            label = "",
            value = detail
        };

        // Add the blank result to the results list
        results.Add(myresult);
    }
    return results;
}
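// Hypothetical POCOs inferred from the property assignments above; the real
// classes are not part of this excerpt. The lower-case label/value naming
// suggests the results feed a jQuery-UI-style autocomplete widget, though
// that is an assumption.
public class SearchResultDetail {
    public string searchkey { get; set; }
    public string searchclass { get; set; }
    public string keydescription { get; set; }
    public double x { get; set; }
    public double y { get; set; }
    public int outSR { get; set; }
}

public class SearchResult {
    public string label { get; set; }
    public SearchResultDetail value { get; set; }
}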
private Yield PopulateDetail(List<SearchResultItem> items, Result result) {
    var pages = new List<SearchResultItem>();
    var files = new List<SearchResultItem>();
    var comments = new List<SearchResultItem>();
    var users = new List<SearchResultItem>();
    var hits = 0;
    foreach (var item in items) {
        SearchResultDetail detail;
        if (_cache.TryGet(item.DetailKey, out detail)) {
            _log.TraceFormat("got {0} from cache", item.DetailKey);
        }
        if (detail == null) {

            // cache miss
            switch (item.Type) {
                case SearchResultType.User:
                    users.Add(item);
                    break;
                case SearchResultType.File:
                    files.Add(item);
                    break;
                case SearchResultType.Comment:
                    comments.Add(item);
                    break;
                default:
                    pages.Add(item);
                    break;
            }
        } else {
            hits++;
            item.Detail = detail;
        }
    }
    _log.DebugFormat("got {0}/{1} items from cache", hits, items.Count);
    if (pages.Any() || files.Any() || comments.Any() || users.Any()) {

        // query lucene for meta data
        var itemLookup = new Dictionary<string, SearchResultItem>();
        var queries = new List<string>();
        if (pages.Any()) {
            BuildLookupQueries(pages, itemLookup, queries, "(id.page:(", ") AND type:wiki) ");
        }
        if (files.Any()) {
            BuildLookupQueries(files, itemLookup, queries, "(id.file:(", ")) ");
        }
        if (comments.Any()) {
            BuildLookupQueries(comments, itemLookup, queries, "(id.comment:(", ")) ");
        }
        if (users.Any()) {
            BuildLookupQueries(users, itemLookup, queries, "(id.user:(", ")) ");
        }
        var count = 0;
        foreach (var query in queries) {
            count++;
            _log.DebugFormat("querying lucene for details ({0}/{1})", count, queries.Count);
            DreamMessage luceneResult = null;
            yield return _searchPlug.With("wikiid", _wikiid).With("q", query).With("max", "all").GetAsync().Set(x => luceneResult = x);
            if (!luceneResult.IsSuccessful) {

                // TODO (arnec): need error handling story here
                throw new Exception("unable to query lucene for details");
            }
            var details = luceneResult.ToDocument()["document"];
            _log.DebugFormat("retrieved {0} detail records", details.ListLength);
            foreach (var detailDoc in details) {
                var detail = SearchResultDetail.FromXDoc(detailDoc);
                SearchResultItem item;
                if (detail["id.file"] != null) {
                    itemLookup.TryGetValue(SearchResultType.File + ":" + detail["id.file"], out item);
                } else if (detail["id.comment"] != null) {
                    itemLookup.TryGetValue(SearchResultType.Comment + ":" + detail["id.comment"], out item);
                } else if (detail["id.user"] != null) {
                    itemLookup.TryGetValue(SearchResultType.User + ":" + detail["id.user"], out item);
                } else {
                    itemLookup.TryGetValue(SearchResultType.Page + ":" + detail["id.page"], out item);
                }
                if (item != null) {
                    _log.TraceFormat("got {0} from lucene", item.DetailKey);
                    _cache.Set(item.DetailKey, detail, TimeSpan.FromSeconds(_settings.GetValue("search/date-cache-time", 60d)));
                    item.Detail = detail;
                }
            }
        }
        _log.Debug("finished populating and caching detail records");
    }
    result.Return();
}
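// BuildLookupQueries is referenced above but not shown. A hypothetical sketch
// inferred from its call sites (a prefix/suffix wrapped around a list of ids)
// and from the "{type}:{id}" keys used when the detail documents come back;
// the real method likely also chunks large id lists across several queries to
// stay under Lucene's boolean-clause limit.
private void BuildLookupQueries(List<SearchResultItem> items, Dictionary<string, SearchResultItem> itemLookup, List<string> queries, string prefix, string suffix) {
    var ids = new StringBuilder();
    foreach(var item in items) {

        // key must match the lookup performed on the returned detail documents
        itemLookup[item.Type + ":" + item.TypeId] = item;
        ids.Append(item.TypeId).Append(' ');
    }
    queries.Add(prefix + ids.ToString().Trim() + suffix);
}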