/// <summary> Constructor for a new instance of the Multiple_Paged_Results_Args class </summary>
/// <param name="Statistics"> Statistics/information about the overall search or browse, including initial query time, complete results counts, and facets </param>
/// <param name="Paged_Results"> Single page of results which are a collection of search title results </param>
public Multiple_Paged_Results_Args(Search_Results_Statistics Statistics, List<iSearch_Title_Result> Paged_Results)
{
    this.Statistics = Statistics;

    // Wrap the single provided page within the list-of-pages collection
    List<List<iSearch_Title_Result>> singlePageWrapper = new List<List<iSearch_Title_Result>>();
    singlePageWrapper.Add(Paged_Results);
    this.Paged_Results = singlePageWrapper;
}
/// <summary> Constructor for a new instance of the RequestCache class </summary>
/// <param name="Current_Mode"> Mode / navigation information for the current request </param>
/// <param name="Hierarchy_Object"> Item aggregation object to display for this request </param>
/// <param name="Results_Statistics"> Information about the complete set of results for a search or browse </param>
/// <param name="Paged_Results"> One page of results for a search or browse, out of the entire set </param>
/// <param name="Browse_Object"> Basic information about any browse or info display </param>
/// <param name="Current_Item"> Item currently being displayed </param>
/// <param name="Current_Page"> Page currently being displayed within the item </param>
/// <param name="HTML_Skin"> HTML web skin controlling the overall appearance of this digital library </param>
/// <param name="Current_User"> Currently logged on user </param>
/// <param name="Public_Folder"> Information about the public folder to display </param>
/// <param name="Site_Map"> Optional site map used to render a navigational tree-view on the left side of static web content pages </param>
/// <param name="Items_In_Title"> List of items within the current title ( used for the Item Group display ) </param>
/// <param name="Static_Web_Content"> HTML content-based browse, info, or simple CMS-style web content object; read from a static HTML file, whose head information must be maintained </param>
/// <param name="Tracer"> Trace object which keeps a list of each method executed and important milestones in rendering </param>
public RequestCache(Navigation_Object Current_Mode, Item_Aggregation Hierarchy_Object, Search_Results_Statistics Results_Statistics, List<iSearch_Title_Result> Paged_Results, Item_Aggregation_Child_Page Browse_Object, SobekCM_Item Current_Item, Page_TreeNode Current_Page, Web_Skin_Object HTML_Skin, User_Object Current_User, Public_User_Folder Public_Folder, SobekCM_SiteMap Site_Map, SobekCM_Items_In_Title Items_In_Title, HTML_Based_Content Static_Web_Content, Custom_Tracer Tracer)
{
    // Navigation and aggregation context
    this.Current_Mode = Current_Mode;
    this.Hierarchy_Object = Hierarchy_Object;

    // Search / browse result objects
    this.Results_Statistics = Results_Statistics;
    this.Paged_Results = Paged_Results;
    this.Browse_Object = Browse_Object;

    // Item-level context
    this.Current_Item = Current_Item;
    this.Current_Page = Current_Page;

    // Display, user, and supporting objects
    this.HTML_Skin = HTML_Skin;
    this.Current_User = Current_User;
    this.Public_Folder = Public_Folder;
    this.Site_Map = Site_Map;
    this.Items_In_Title = Items_In_Title;
    this.Static_Web_Content = Static_Web_Content;
    this.Tracer = Tracer;
}
/// <summary> Constructor for a new instance of the AggregationViewBag class </summary>
/// <param name="Hierarchy_Object"> Item aggregation object to display for this request </param>
/// <param name="Results_Statistics"> Information about the complete set of results for a browse </param>
/// <param name="Paged_Results"> One page of results for a browse, out of the entire set </param>
/// <param name="Browse_Object"> Basic information about any browse or info display </param>
/// <param name="Static_Web_Content"> HTML content-based aggregation browse or info; read from a static HTML file, whose head information must be maintained </param>
public AggregationViewBag(Item_Aggregation Hierarchy_Object, Search_Results_Statistics Results_Statistics, List<iSearch_Title_Result> Paged_Results, Item_Aggregation_Child_Page Browse_Object, HTML_Based_Content Static_Web_Content)
{
    // Simply copy each incoming value onto the matching property
    this.Hierarchy_Object = Hierarchy_Object;
    this.Results_Statistics = Results_Statistics;
    this.Paged_Results = Paged_Results;
    this.Browse_Object = Browse_Object;
    this.Static_Web_Content = Static_Web_Content;
}
/// <summary> Constructor for a new instance of the abstract_ResultsViewer class </summary>
/// <param name="RequestSpecificValues"> All the necessary, non-global data specific to the current request </param>
/// <param name="ResultsStats"> Statistics about the results to display, including the facets </param>
/// <param name="PagedResults"> Actual pages of results </param>
protected abstract_ResultsViewer(RequestCache RequestSpecificValues, Search_Results_Statistics ResultsStats, List<iSearch_Title_Result> PagedResults)
{
    this.RequestSpecificValues = RequestSpecificValues;
    this.ResultsStats = ResultsStats;
    this.PagedResults = PagedResults;

    // Determine the current user mask from the session ( defaults to zero when unavailable )
    CurrentUserMask = 0;
    if (HttpContext.Current != null)
    {
        object ipRangeMembership = HttpContext.Current.Session["IP_Range_Membership"];
        if (ipRangeMembership != null)
        {
            CurrentUserMask = (int)ipRangeMembership;
        }
    }
}
/// <summary> Constructor for a new instance of the RequestCache class </summary>
/// <param name="Current_Mode"> Mode / navigation information for the current request </param>
/// <param name="Results_Statistics"> Information about the complete set of results for a search or browse </param>
/// <param name="Paged_Results"> One page of results for a search or browse, out of the entire set </param>
/// <param name="HTML_Skin"> HTML web skin controlling the overall appearance of this digital library </param>
/// <param name="Current_User"> Currently logged on user </param>
/// <param name="Public_Folder"> Information about the public folder to display </param>
/// <param name="Top_Collection"> Item aggregation for the top-level collection, used in several places ( e.g., showing the
/// correct banner even when it is not the "current" aggregation ) </param>
/// <param name="Tracer"> Trace object which keeps a list of each method executed and important milestones in rendering </param>
public RequestCache(Navigation_Object Current_Mode, Search_Results_Statistics Results_Statistics, List<iSearch_Title_Result> Paged_Results, Web_Skin_Object HTML_Skin, User_Object Current_User, Public_User_Folder Public_Folder, Item_Aggregation Top_Collection, Custom_Tracer Tracer)
{
    // Navigation context
    this.Current_Mode = Current_Mode;

    // Search / browse result objects
    this.Results_Statistics = Results_Statistics;
    this.Paged_Results = Paged_Results;

    // Display, user, and supporting objects
    this.HTML_Skin = HTML_Skin;
    this.Current_User = Current_User;
    this.Public_Folder = Public_Folder;
    this.Top_Collection = Top_Collection;
    this.Tracer = Tracer;
}
/// <summary> Constructor for a new instance of the Google_Map_ResultsViewer_Beta class </summary>
/// <param name="RequestSpecificValues"> All the necessary, non-global data specific to the current request </param>
/// <param name="ResultsStats"> Statistics about the results to display including the facets </param>
/// <param name="PagedResults"> Actual pages of results </param>
public Google_Map_ResultsViewer_Beta(RequestCache RequestSpecificValues, Search_Results_Statistics ResultsStats, List<iSearch_Title_Result> PagedResults) : base(RequestSpecificValues, ResultsStats, PagedResults)
{
    // Holds actions posted back from the page ( empty string when no form value was provided )
    string payload = HttpContext.Current.Request.Form["payload"] ?? String.Empty;

    // See if there were hidden requests
    if (!String.IsNullOrEmpty(payload))
    {
        // A callback action was posted back; dispatch it
        Perform_Callback_Action(payload, RequestSpecificValues.Tracer);
    }
    else
    {
        // Do a search for all the items in this aggregation
        // NOTE(review): assumes Current_Mode.Aggregation is non-null here and may hold multiple space-separated codes — confirm
        string temp_AggregationId = RequestSpecificValues.Current_Mode.Aggregation;
        string[] temp_AggregationList = temp_AggregationId.Split(' ');
        Perform_Aggregation_Search(temp_AggregationList, RequestSpecificValues.Tracer);
    }
}
/// <summary> Constructor for a new instance of the PagedResults_HtmlSubwriter class </summary>
/// <param name="RequestSpecificValues"> All the necessary, non-global data specific to the current request </param>
/// <param name="ResultsStats"> Statistics about the results to display including the facets </param>
/// <param name="PagedResults"> Actual pages of results </param>
public PagedResults_HtmlSubwriter(RequestCache RequestSpecificValues, Search_Results_Statistics ResultsStats, List<iSearch_Title_Result> PagedResults) : base(RequestSpecificValues)
{
    // Save the search results info
    resultsStatistics = ResultsStats;
    pagedResults = PagedResults;

    // Check that the current aggregation code is valid
    if (!UI_ApplicationCache_Gateway.Aggregations.isValidCode(RequestSpecificValues.Current_Mode.Aggregation))
    {
        // Is there a "forward value" ( an alias which maps to a real aggregation code )?
        if (UI_ApplicationCache_Gateway.Collection_Aliases.ContainsKey(RequestSpecificValues.Current_Mode.Aggregation))
        {
            RequestSpecificValues.Current_Mode.Aggregation = UI_ApplicationCache_Gateway.Collection_Aliases[RequestSpecificValues.Current_Mode.Aggregation];
        }
    }

    // Use the method in the base class to actually pull the entire hierarchy; on failure,
    // flag the request as an error and bail out early
    if (!Get_Collection(RequestSpecificValues.Current_Mode, RequestSpecificValues.Tracer, out hierarchyObject))
    {
        RequestSpecificValues.Current_Mode.Mode = Display_Mode_Enum.Error;
        return;
    }

    // Initialize display-related fields to empty defaults
    Browse_Title = String.Empty;
    sortOptions = String.Empty;
    leftButtons = String.Empty;
    rightButtons = String.Empty;
    Showing_Text = String.Empty;
    Include_Bookshelf_View = false;
    Outer_Form_Name = String.Empty;
    Folder_Owner_Name = String.Empty;
    Folder_Owner_Email = String.Empty;
    term_counter = 0;

    // Try to get the facet configuration information
    // NOTE(review): the default below is EIGHT characters, but PadRight pads only to SEVEN —
    // confirm whether PadRight(7, '0') should be PadRight(8, '0')
    facetInformation = "00000000";
    if (HttpContext.Current.Request.Form["facet"] != null)
        facetInformation = HttpContext.Current.Request.Form["facet"].PadRight(7, '0');

    // NOTE(review): condition is always true; the original postback check survives as the trailing comment
    if ( true ) // if (currentMode.isPostBack)
    {
        // Pull the standard values
        NameValueCollection form = HttpContext.Current.Request.Form;

        if (form["item_action"] != null)
        {
            string action = form["item_action"].ToLower().Trim();
            string url_description = form["url_description"].Trim();

            // Handle a request to email the current results URL
            if (action == "email")
            {
                string address = form["email_address"].Replace(";", ",").Trim();
                string comments = form["email_comments"].Trim();
                string format = form["email_format"].Trim().ToUpper();
                string list_type = form["list_type"].Trim();
                if (address.Length > 0)
                {
                    // Determine the email format
                    bool is_html_format = format != "TEXT";

                    // CC: the user, unless they are already on the list
                    string cc_list = RequestSpecificValues.Current_User.Email;
                    if (address.ToUpper().IndexOf(RequestSpecificValues.Current_User.Email.ToUpper()) >= 0)
                        cc_list = String.Empty;

                    // Send the email, then surface any error ( or success ) message on next page load
                    string any_error = URL_Email_Helper.Send_Email(address, cc_list, comments, RequestSpecificValues.Current_User.Full_Name, RequestSpecificValues.Current_Mode.Instance_Abbreviation, is_html_format, HttpContext.Current.Items["Original_URL"].ToString(), url_description, list_type, RequestSpecificValues.Current_User.UserID);
                    HttpContext.Current.Session.Add("ON_LOAD_MESSAGE", any_error.Length > 0 ? any_error : "Your email has been sent");

                    RequestSpecificValues.Current_Mode.isPostBack = true;

                    // Do this to force a return trip (cirumnavigate cacheing) — append a
                    // millisecond-based query value so the browser cannot serve a cached page
                    string original_url = HttpContext.Current.Items["Original_URL"].ToString();
                    if ( original_url.IndexOf("?") < 0 )
                        HttpContext.Current.Response.Redirect(original_url + "?p=" + DateTime.Now.Millisecond, false);
                    else
                        HttpContext.Current.Response.Redirect(original_url + "&p=" + DateTime.Now.Millisecond, false);

                    // Redirect(..., false) above does not end the response, so complete it explicitly
                    HttpContext.Current.ApplicationInstance.CompleteRequest();
                    RequestSpecificValues.Current_Mode.Request_Completed = true;
                    return;
                }
            }

            // Handle a request to save this search to the user's saved searches
            if (action == "save_search")
            {
                string usernotes = HttpContext.Current.Request.Form["add_notes"].Trim();
                bool open_searches = HttpContext.Current.Request.Form["open_searches"] != null;
                string original_url = HttpContext.Current.Items["Original_URL"].ToString();

                // -1000 is the database error sentinel from Save_User_Search
                if (SobekCM_Database.Save_User_Search(RequestSpecificValues.Current_User.UserID, original_url, url_description, 0, usernotes, RequestSpecificValues.Tracer) != -1000)
                {
                    if (open_searches)
                    {
                        HttpContext.Current.Session.Add("ON_LOAD_WINDOW", "?m=lms");
                    }

                    HttpContext.Current.Session.Add("ON_LOAD_MESSAGE", "Search has been saved to your saved searches.");
                }
                else
                {
                    HttpContext.Current.Session.Add("ON_LOAD_MESSAGE", "ERROR encountered while saving!");
                }

                // Do this to force a return trip (cirumnavigate cacheing)
                RequestSpecificValues.Current_Mode.isPostBack = true;
                if (original_url.IndexOf("?") > 0)
                {
                    HttpContext.Current.Response.Redirect(original_url + "&p=" + DateTime.Now.Millisecond, false);
                }
                else
                {
                    HttpContext.Current.Response.Redirect(original_url + "?p=" + DateTime.Now.Millisecond, false);
                }

                HttpContext.Current.ApplicationInstance.CompleteRequest();
                RequestSpecificValues.Current_Mode.Request_Completed = true;
            }
        }
    }
}
/// <summary> Stores a (assumed private) user's folder browse into the cache </summary>
/// <param name="User_ID"> Primary key for the user </param>
/// <param name="Folder_Name"> Name of the folder to store </param>
/// <param name="StoreObject"> Object to store </param>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering</param>
public static void Store_User_Folder_Browse_Statistics(int User_ID, string Folder_Name, Search_Results_Statistics StoreObject, Custom_Tracer Tracer)
{
    // If the cache is disabled, just return before even tracing
    if (Settings.Disabled)
        return;

    // Determine the key ( folder name is lower-cased so lookups are case-insensitive )
    string key = "USER_FOLDER_" + User_ID + "_" + Folder_Name.ToLower() + "_STATISTICS";

    // Store this on the local cache, if not already there.  The trace is written inside the
    // branch so it only claims an addition when one actually occurs ( consistent with
    // Store_Browse_Result_Statistics )
    if (HttpContext.Current.Cache[key] == null)
    {
        if (Tracer != null)
        {
            Tracer.Add_Trace("CachedDataManager.Store_User_Folder_Browse_Statistics", "Adding object '" + key + "' to the cache with expiration of 1 minute");
        }

        // Sliding expiration of one minute keeps the (private) folder view reasonably fresh
        HttpContext.Current.Cache.Insert(key, StoreObject, null, Cache.NoAbsoluteExpiration, TimeSpan.FromMinutes(1));
    }
}
/// <summary> Stores the table of search results to the cache </summary>
/// <param name="Aggregation_Code"> Aggregation code for the browse statistics to store in the cache </param>
/// <param name="Browse_Name"> Name of the browse to store the browse statistics in the cache </param>
/// <param name="StoreObject"> Search results item/title list </param>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering </param>
public static void Store_Browse_Result_Statistics(string Aggregation_Code, string Browse_Name, Search_Results_Statistics StoreObject, Custom_Tracer Tracer)
{
    // Nothing to do at all when caching has been disabled
    if (Settings.Disabled)
        return;

    // Determine the key under which these statistics are cached
    string key = "TOTALBROWSE_" + Aggregation_Code.ToUpper() + "_" + Browse_Name.ToUpper();

    // If a copy is already cached locally, leave it in place
    if (HttpContext.Current.Cache[key] != null)
        return;

    if (Tracer != null)
    {
        Tracer.Add_Trace("CachedDataManager.Store_Browse_Result_Statistics", "Adding object '" + key + "' to the local cache with expiration of 3 minutes");
    }

    // Store locally with a three minute sliding expiration
    HttpContext.Current.Cache.Insert(key, StoreObject, null, Cache.NoAbsoluteExpiration, TimeSpan.FromMinutes(3));
}
/// <summary> Display search results in simple XML format </summary>
/// <param name="Output"> Stream to which to write the text for this main writer </param>
/// <param name="Args"> Arguments for the current search / results request ( page number is echoed in the output ) </param>
/// <param name="ResultsStats"> Statistics for the complete result set ( total title count is echoed in the output ) </param>
/// <param name="ResultsPage"> Single page of title results to serialize as XML </param>
protected internal void legacy_xml_display_search_results(TextWriter Output, Results_Arguments Args, Search_Results_Statistics ResultsStats, List<iSearch_Title_Result> ResultsPage)
{
    // Get the URL and network roots
    string image_url = Engine_ApplicationCache_Gateway.Settings.Servers.Image_URL;
    string network = Engine_ApplicationCache_Gateway.Settings.Servers.Image_Server_Network;
    string base_url = Engine_ApplicationCache_Gateway.Settings.Servers.Base_URL;

    // When running inside a web request, derive the base url from the request itself,
    // stripping any query string and the service endpoint name
    if (HttpContext.Current != null)
    {
        base_url = HttpContext.Current.Request.Url.AbsoluteUri;
        if (base_url.IndexOf("?") > 0)
            base_url = base_url.Substring(0, base_url.IndexOf("?")).Replace("sobekcm.svc", "");
    }

    // Ensure both roots end with a trailing slash
    if ((base_url.Length > 0) && (base_url[base_url.Length - 1] != '/'))
        base_url = base_url + "/";
    if ((image_url.Length > 0) && (image_url[image_url.Length - 1] != '/'))
        image_url = image_url + "/";

    // Write the header first
    Output.WriteLine("<?xml version=\"1.0\" encoding=\"UTF-8\" ?> ");
    Output.WriteLine("<ResultSet Page=\"" + Args.Page + "\" Total=\"" + ResultsStats.Total_Titles + "\">");

    // Now, add XML for each title.  Consecutive results sharing a BibID are grouped
    // under a single <TitleResult> element
    string lastBibID = string.Empty;
    foreach (iSearch_Title_Result thisResult in ResultsPage)
    {
        if (thisResult.BibID != lastBibID)
        {
            // Close the previous title element before opening the next one
            if (lastBibID.Length > 0)
                Output.WriteLine("</TitleResult>");
            Output.WriteLine("<TitleResult ID=\"" + thisResult.BibID + "\">");
            lastBibID = thisResult.BibID;
        }

        // Determine folder from BibID ( pairs of characters become directory levels )
        // NOTE(review): assumes every BibID is at least ten characters long — confirm
        string folder = thisResult.BibID.Substring(0, 2) + "/" + thisResult.BibID.Substring(2, 2) + "/" + thisResult.BibID.Substring(4, 2) + "/" + thisResult.BibID.Substring(6, 2) + "/" + thisResult.BibID.Substring(8);

        // Now, add XML for each item within this title
        for (int i = 0; i < thisResult.Item_Count; i++)
        {
            iSearch_Item_Result itemResult = thisResult.Get_Item(i);
            Output.WriteLine("\t<ItemResult ID=\"" + thisResult.BibID + "_" + itemResult.VID + "\">");

            // Title and (optional) publication date are XML-escaped via Write_XML
            Output.Write("\t\t<Title>");
            Write_XML(Output, itemResult.Title);
            Output.WriteLine("</Title>");
            if ( !String.IsNullOrEmpty(itemResult.PubDate))
            {
                Output.Write("\t\t<Date>");
                Write_XML(Output, itemResult.PubDate);
                Output.WriteLine("</Date>");
            }

            // Location block: item URL, optional thumbnail, plus web and network folder paths
            Output.WriteLine("\t\t<Location>");
            Output.WriteLine("\t\t\t<URL>" + base_url + thisResult.BibID + "/" + itemResult.VID + "</URL>");
            if (!String.IsNullOrEmpty(itemResult.MainThumbnail))
            {
                Output.WriteLine("\t\t\t<MainThumb>" + image_url + folder + "/" + itemResult.VID + "/" + itemResult.MainThumbnail + "</MainThumb>");
            }
            Output.WriteLine("\t\t\t<Folder type=\"web\">" + image_url + folder + "/" + itemResult.VID + "</Folder>");
            Output.WriteLine("\t\t\t<Folder type=\"network\">" + network + folder.Replace("/", "\\") + "\\" + itemResult.VID + "</Folder>");
            Output.WriteLine("\t\t</Location>");
            Output.WriteLine("\t</ItemResult>");
        }
    }

    // Close the final title element ( only written if at least one result was emitted )
    if (ResultsPage.Count > 0)
        Output.WriteLine("</TitleResult>");
    Output.WriteLine("</ResultSet>");
}
/// <summary> Performs a search ( or retrieves the search results from the cache ) and outputs the results and search url used </summary>
/// <param name="Current_Mode"> Mode / navigation information for the current request</param>
/// <param name="Aggregation_Object"> Object for the current aggregation object, against which this search is performed </param>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering </param>
/// <param name="Complete_Result_Set_Info"> [OUT] Information about the entire set of results </param>
/// <param name="Paged_Results"> [OUT] List of search results for the requested page of results </param>
/// <returns> Error enumeration value — NONE on success, otherwise the type of failure encountered </returns>
public ResultsEndpointErrorEnum Get_Search_Results(Results_Arguments Current_Mode, Complete_Item_Aggregation Aggregation_Object, Custom_Tracer Tracer, out Search_Results_Statistics Complete_Result_Set_Info, out List<iSearch_Title_Result> Paged_Results)
{
    Tracer.Add_Trace("SobekCM_Assistant.Get_Search_Results", String.Empty);

    // Set output initially to null
    Paged_Results = null;
    Complete_Result_Set_Info = null;

    // Get the sort; only values 0, 1, 2, 10, 11 are honored — anything else falls back to 0
    int sort = Current_Mode.Sort.HasValue ? Math.Max(Current_Mode.Sort.Value, ((ushort)1)) : 0;
    if ((sort != 0) && (sort != 1) && (sort != 2) && (sort != 10) && (sort != 11))
        sort = 0;

    // If there was no search, it is a browse
    if (String.IsNullOrEmpty(Current_Mode.Search_String))
    {
        // Get the page count in the results
        int current_page_index = Current_Mode.Page;

        // Set the flags for how much data is needed. (i.e., do we need to pull ANYTHING? or
        // perhaps just the next page of results ( as opposed to pulling facets again).
        bool need_browse_statistics = true;
        bool need_paged_results = true;
        if (Current_Mode.Use_Cache)
        {
            // Look to see if the browse statistics are available on any cache for this browse
            Complete_Result_Set_Info = CachedDataManager.Retrieve_Browse_Result_Statistics(Aggregation_Object.Code, "all", Tracer);
            if (Complete_Result_Set_Info != null)
                need_browse_statistics = false;

            // Look to see if the paged results are available on any cache..
            Paged_Results = CachedDataManager.Retrieve_Browse_Results(Aggregation_Object.Code, "all", current_page_index, sort, Tracer);
            if (Paged_Results != null)
                need_paged_results = false;
        }

        // Was a copy found in the cache?
        // NOTE(review): the trace category strings below say 'Get_Browse_Info' inside Get_Search_Results —
        // likely copy/paste; left unchanged since these are runtime strings
        if ((!need_browse_statistics) && (!need_paged_results))
        {
            if (Tracer != null)
            {
                Tracer.Add_Trace("SobekCM_Assistant.Get_Browse_Info", "Browse statistics and paged results retrieved from cache");
            }
        }
        else
        {
            if (Tracer != null)
            {
                Tracer.Add_Trace("SobekCM_Assistant.Get_Browse_Info", "Building results information");
            }

            // Try to pull more than one page, so we can cache the next page or so
            List<List<iSearch_Title_Result>> pagesOfResults;

            // Get from the hierarchy object ( NOTE(review): 'Gat_All_Browse' (sic) is the utilities method's actual name )
            Multiple_Paged_Results_Args returnArgs = Item_Aggregation_Utilities.Gat_All_Browse(Aggregation_Object, current_page_index, sort, (int) Current_Mode.ResultsPerPage, Current_Mode.Use_Cache, need_browse_statistics, Tracer);
            if (need_browse_statistics)
            {
                Complete_Result_Set_Info = returnArgs.Statistics;

                // Resolve the facet display terms from the metadata field ids
                foreach (Search_Facet_Collection thisFacet in Complete_Result_Set_Info.Facet_Collections)
                {
                    Metadata_Search_Field field = Engine_ApplicationCache_Gateway.Settings.Metadata_Search_Field_By_ID(thisFacet.MetadataTypeID);
                    thisFacet.MetadataTerm = field.Facet_Term;
                }
            }
            pagesOfResults = returnArgs.Paged_Results;
            if ((pagesOfResults != null) && (pagesOfResults.Count > 0))
                Paged_Results = pagesOfResults[0];

            // Save the overall result set statistics to the cache if something was pulled
            if (Current_Mode.Use_Cache)
            {
                if ((need_browse_statistics) && (Complete_Result_Set_Info != null))
                {
                    CachedDataManager.Store_Browse_Result_Statistics(Aggregation_Object.Code, "all", Complete_Result_Set_Info, Tracer);
                }

                // Save the overall result set statistics to the cache if something was pulled
                if ((need_paged_results) && (Paged_Results != null))
                {
                    CachedDataManager.Store_Browse_Results(Aggregation_Object.Code, "all", current_page_index, sort, pagesOfResults, Tracer);
                }
            }
        }

        return ResultsEndpointErrorEnum.NONE;
    }

    // Depending on type of search, either go to database or Greenstone
    if (Current_Mode.Search_Type == Search_Type_Enum.Map)
    {
        try
        {
            // 1000 acts as the 'not provided' sentinel for each coordinate
            double lat1 = 1000;
            double long1 = 1000;
            double lat2 = 1000;
            double long2 = 1000;
            string[] terms = Current_Mode.Coordinates.Split(",".ToCharArray());
            if (terms.Length < 4)
            {
                // Only a single point was provided — use it for both corners
                lat1 = Convert.ToDouble(terms[0]);
                lat2 = lat1;
                long1 = Convert.ToDouble(terms[1]);
                long2 = long1;
            }
            if (terms.Length >= 4)
            {
                if (terms[0].Length > 0) lat1 = Convert.ToDouble(terms[0]);
                if (terms[1].Length > 0) long1 = Convert.ToDouble(terms[1]);
                if (terms[2].Length > 0) lat2 = Convert.ToDouble(terms[2]);
                if (terms[3].Length > 0) long2 = Convert.ToDouble(terms[3]);
            }

            // If just the first point is valid, use that
            if ((lat2 == 1000) || (long2 == 1000))
            {
                lat2 = lat1;
                long2 = long1;
            }

            // If just the second point is valid, use that
            if ((lat1 == 1000) || (long1 == 1000))
            {
                lat1 = lat2;
                long1 = long2;
            }

            // Perform the search against the database
            try
            {
                // Get the page count in the results
                int current_page_index = Current_Mode.Page;

                // Try to pull more than one page, so we can cache the next page or so
                Multiple_Paged_Results_Args returnArgs = Engine_Database.Get_Items_By_Coordinates(Current_Mode.Aggregation, lat1, long1, lat2, long2, false, 20, current_page_index, sort, false, new List<short>(), true, Tracer);
                List<List<iSearch_Title_Result>> pagesOfResults = returnArgs.Paged_Results;
                Complete_Result_Set_Info = returnArgs.Statistics;
                if ((pagesOfResults != null) && (pagesOfResults.Count > 0))
                    Paged_Results = pagesOfResults[0];
            }
            catch (Exception ee)
            {
                // Next, show the message to the user
                Tracer.Add_Trace("ResultServices.Get_Search_Results", "Exception caught while performing database coordinate search");
                Tracer.Add_Trace("ResultServices.Get_Search_Results", "Exception: " + ee.Message);
                Tracer.Add_Trace("ResultServices.Get_Search_Results", ee.StackTrace);
                return ee.Message.ToUpper().IndexOf("TIMEOUT") >= 0 ? ResultsEndpointErrorEnum.Database_Timeout_Exception : ResultsEndpointErrorEnum.Database_Exception;
            }
        }
        catch (Exception ee )
        {
            // Next, show the message to the user ( coordinate parsing or other unexpected failure )
            Tracer.Add_Trace("ResultServices.Get_Search_Results", "Exception: " + ee.Message);
            Tracer.Add_Trace("ResultServices.Get_Search_Results", ee.StackTrace);
            return ResultsEndpointErrorEnum.Unknown;
        }
    }
    else
    {
        List<string> terms = new List<string>();
        List<string> web_fields = new List<string>();

        // Split the terms correctly ( only use the database stop words for the split if this will go to the database ultimately)
        if ((Current_Mode.Search_Type == Search_Type_Enum.Full_Text) || (Current_Mode.Search_Fields.IndexOf("TX") >= 0))
        {
            Split_Clean_Search_Terms_Fields(Current_Mode.Search_String, Current_Mode.Search_Fields, Current_Mode.Search_Type, terms, web_fields, null, Current_Mode.Search_Precision, ',');
        }
        else
        {
            Split_Clean_Search_Terms_Fields(Current_Mode.Search_String, Current_Mode.Search_Fields, Current_Mode.Search_Type, terms, web_fields, Engine_ApplicationCache_Gateway.StopWords, Current_Mode.Search_Precision, ',');
        }

        // Get the count that will be used ( terms and fields are paired positionally )
        int actualCount = Math.Min(terms.Count, web_fields.Count);

        // Determine if this is a special search type which returns more rows and is not cached.
        // This is used to return the results as XML and DATASET
        int results_per_page = (int) Current_Mode.ResultsPerPage;

        // Determine if a date range was provided
        long date1 = -1;
        long date2 = -1;
        if (Current_Mode.DateRange_Date1.HasValue)
        {
            date1 = Current_Mode.DateRange_Date1.Value;
            if (Current_Mode.DateRange_Date2.HasValue)
            {
                // Swap if the two dates were provided out of order
                if (Current_Mode.DateRange_Date2.Value >= Current_Mode.DateRange_Date1.Value)
                    date2 = Current_Mode.DateRange_Date2.Value;
                else
                {
                    date1 = Current_Mode.DateRange_Date2.Value;
                    date2 = Current_Mode.DateRange_Date1.Value;
                }
            }
            else
            {
                date2 = date1;
            }
        }
        if (date1 < 0)
        {
            // Fall back to a year range, expressed as total days since 1/1/0001
            if ((Current_Mode.DateRange_Year1.HasValue) && (Current_Mode.DateRange_Year1.Value > 0))
            {
                DateTime startDate = new DateTime(Current_Mode.DateRange_Year1.Value, 1, 1);
                TimeSpan timeElapsed = startDate.Subtract(new DateTime(1, 1, 1));
                date1 = (long)timeElapsed.TotalDays;
                if ((Current_Mode.DateRange_Year2.HasValue) && (Current_Mode.DateRange_Year2.Value > 0))
                {
                    startDate = new DateTime(Current_Mode.DateRange_Year2.Value, 12, 31);
                    timeElapsed = startDate.Subtract(new DateTime(1, 1, 1));
                    date2 = (long)timeElapsed.TotalDays;
                }
                else
                {
                    startDate = new DateTime(Current_Mode.DateRange_Year1.Value, 12, 31);
                    timeElapsed = startDate.Subtract(new DateTime(1, 1, 1));
                    date2 = (long)timeElapsed.TotalDays;
                }
            }
        }

        // Set the flags for how much data is needed. (i.e., do we need to pull ANYTHING? or
        // perhaps just the next page of results ( as opposed to pulling facets again).
        bool need_search_statistics = true;
        bool need_paged_results = true;
        if (Current_Mode.Use_Cache)
        {
            // Look to see if the search statistics are available on any cache..
            Complete_Result_Set_Info = CachedDataManager.Retrieve_Search_Result_Statistics(Current_Mode, actualCount, web_fields, terms, date1, date2, Tracer);
            if (Complete_Result_Set_Info != null)
                need_search_statistics = false;

            // Look to see if the paged results are available on any cache..
            Paged_Results = CachedDataManager.Retrieve_Search_Results(Current_Mode, sort, actualCount, web_fields, terms, date1, date2, Tracer);
            if (Paged_Results != null)
                need_paged_results = false;
        }

        // If both were retrieved, do nothing else
        if ((need_paged_results) || (need_search_statistics))
        {
            // Should this pull the search from the database, or from greenstone?
            if ((Current_Mode.Search_Type == Search_Type_Enum.Full_Text) || (Current_Mode.Search_Fields.IndexOf("TX") >= 0))
            {
                try
                {
                    // Get the page count in the results
                    int current_page_index = Current_Mode.Page;

                    // Perform the search against greenstone
                    Search_Results_Statistics recomputed_search_statistics;
                    Perform_Solr_Search(Tracer, terms, web_fields, actualCount, Current_Mode.Aggregation, current_page_index, sort, results_per_page, out recomputed_search_statistics, out Paged_Results);
                    if (need_search_statistics)
                    {
                        Complete_Result_Set_Info = recomputed_search_statistics;

                        // Resolve the facet display terms from the metadata field ids
                        foreach (Search_Facet_Collection thisFacet in Complete_Result_Set_Info.Facet_Collections)
                        {
                            Metadata_Search_Field field = Engine_ApplicationCache_Gateway.Settings.Metadata_Search_Field_By_ID(thisFacet.MetadataTypeID);
                            thisFacet.MetadataTerm = field.Facet_Term;
                        }
                    }
                }
                catch (Exception ee)
                {
                    // Next, show the message to the user
                    Tracer.Add_Trace("ResultServices.Get_Search_Results", "Exception caught while performing solr/lucene search");
                    Tracer.Add_Trace("ResultServices.Get_Search_Results", "Exception: " + ee.Message);
                    Tracer.Add_Trace("ResultServices.Get_Search_Results", ee.StackTrace);
                    return ResultsEndpointErrorEnum.Unknown;
                }

                // If this was a special search, don't cache this
                // NOTE(review): this guard looks inverted — it stores to the cache only when Use_Cache is FALSE,
                // contradicting both the comment above and the browse path ( which stores under Use_Cache ). Confirm intent.
                if (!Current_Mode.Use_Cache)
                {
                    // Cache the search statistics, if it was needed
                    if ((need_search_statistics) && (Complete_Result_Set_Info != null))
                    {
                        CachedDataManager.Store_Search_Result_Statistics(Current_Mode, actualCount, web_fields, terms, date1, date2, Complete_Result_Set_Info, Tracer);
                    }

                    // Cache the search results
                    // NOTE(review): this stores the single Paged_Results page here, while the database branch below
                    // stores the full pagesOfResults collection — presumably overloads of Store_Search_Results; confirm.
                    if ((need_paged_results) && (Paged_Results != null))
                    {
                        CachedDataManager.Store_Search_Results(Current_Mode, sort, actualCount, web_fields, terms, date1, date2, Paged_Results, Tracer);
                    }
                }
            }
            else
            {
                // Try to pull more than one page, so we can cache the next page or so
                List<List<iSearch_Title_Result>> pagesOfResults;

                // Perform the search against the database
                try
                {
                    Search_Results_Statistics recomputed_search_statistics;
                    Perform_Database_Search(Tracer, terms, web_fields, date1, date2, actualCount, Current_Mode, sort, Aggregation_Object, results_per_page, Current_Mode.Use_Cache, out recomputed_search_statistics, out pagesOfResults, need_search_statistics);
                    if (need_search_statistics)
                    {
                        Complete_Result_Set_Info = recomputed_search_statistics;

                        // Resolve the facet display terms from the metadata field ids
                        foreach (Search_Facet_Collection thisFacet in Complete_Result_Set_Info.Facet_Collections)
                        {
                            Metadata_Search_Field field = Engine_ApplicationCache_Gateway.Settings.Metadata_Search_Field_By_ID(thisFacet.MetadataTypeID);
                            thisFacet.MetadataTerm = field.Facet_Term;
                        }
                    }
                    if ((pagesOfResults != null) && (pagesOfResults.Count > 0))
                        Paged_Results = pagesOfResults[0];
                }
                catch (Exception ee)
                {
                    // Next, show the message to the user
                    // NOTE(review): message says 'coordinate search' though this is the general database search — copy/paste
                    Tracer.Add_Trace("ResultServices.Get_Search_Results", "Exception caught while performing database coordinate search");
                    Tracer.Add_Trace("ResultServices.Get_Search_Results", "Exception: " + ee.Message);
                    Tracer.Add_Trace("ResultServices.Get_Search_Results", ee.StackTrace);
                    return ee.Message.ToUpper().IndexOf("TIMEOUT") >= 0 ? ResultsEndpointErrorEnum.Database_Timeout_Exception : ResultsEndpointErrorEnum.Database_Exception;
                }

                // If this was a special search, don't cache this
                // NOTE(review): same possibly-inverted guard as in the solr branch above — confirm intent.
                if (!Current_Mode.Use_Cache)
                {
                    // Cache the search statistics, if it was needed
                    if ((need_search_statistics) && (Complete_Result_Set_Info != null))
                    {
                        CachedDataManager.Store_Search_Result_Statistics(Current_Mode, actualCount, web_fields, terms, date1, date2, Complete_Result_Set_Info, Tracer);
                    }

                    // Cache the search results
                    if ((need_paged_results) && (pagesOfResults != null))
                    {
                        CachedDataManager.Store_Search_Results(Current_Mode, sort, actualCount, web_fields, terms, date1, date2, pagesOfResults, Tracer);
                    }
                }
            }
        }
    }
    return ResultsEndpointErrorEnum.NONE;
}
/// <summary> Builds a Solr query from the provided search terms / web field codes and executes it against the document index </summary>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering </param>
/// <param name="Terms"> List of search terms, parallel to <paramref name="Web_Fields"/> </param>
/// <param name="Web_Fields"> List of web field codes; each may be prefixed with a joiner character ( '+' = AND, '=' = OR, '-' = NOT ) </param>
/// <param name="ActualCount"> Number of term/field pairs actually present in the two lists </param>
/// <param name="Current_Aggregation"> Aggregation code within which to search </param>
/// <param name="Current_Page"> Which page of results to return ( one-based ) </param>
/// <param name="Current_Sort"> Current sort to apply </param>
/// <param name="Results_Per_Page"> Number of results to return per "page" of results </param>
/// <param name="Complete_Result_Set_Info"> [OUT] Information about the entire set of results </param>
/// <param name="Paged_Results"> [OUT] List of search results for the requested page of results </param>
private static void Perform_Solr_Search(Custom_Tracer Tracer, List<string> Terms, List<string> Web_Fields, int ActualCount, string Current_Aggregation, int Current_Page, int Current_Sort, int Results_Per_Page, out Search_Results_Statistics Complete_Result_Set_Info, out List<iSearch_Title_Result> Paged_Results)
{
    if (Tracer != null)
    {
        Tracer.Add_Trace("SobekCM_Assistant.Perform_Solr_Search", "Build the Solr query");
    }

    // Step through all the terms and fields, building a single Solr query string
    StringBuilder queryStringBuilder = new StringBuilder();
    for (int i = 0; i < ActualCount; i++)
    {
        string web_field = Web_Fields[i];
        string searchTerm = Terms[i];

        if (i == 0)
        {
            // Skip any joiner for the very first field indicated
            // ( length guard added: an empty field code previously threw IndexOutOfRangeException )
            if ((web_field.Length > 0) && ((web_field[0] == '+') || (web_field[0] == '=') || (web_field[0] == '-')))
            {
                web_field = web_field.Substring(1);
            }
        }
        else
        {
            // Add the joiner for the subsequent terms ( default joiner is AND )
            if ((web_field.Length > 0) && ((web_field[0] == '+') || (web_field[0] == '=') || (web_field[0] == '-')))
            {
                switch (web_field[0])
                {
                    case '=':
                        queryStringBuilder.Append(" OR ");
                        break;

                    case '-':
                        queryStringBuilder.Append(" NOT ");
                        break;

                    default:
                        // '+' and any unexpected character both map to AND, matching prior behavior
                        queryStringBuilder.Append(" AND ");
                        break;
                }
                web_field = web_field.Substring(1);
            }
            else
            {
                queryStringBuilder.Append(" AND ");
            }
        }

        // Resolve the web field code to a Solr field prefix ( shared helper replaces duplicated logic )
        string solr_field = Solr_Field_For_Web_Field(web_field);

        // Add the solr search string; colons are stripped from the term since they are
        // Solr syntax, and multi-word terms are quoted as phrases
        if (searchTerm.IndexOf(" ") > 0)
        {
            queryStringBuilder.Append("(" + solr_field + "\"" + searchTerm.Replace(":", "") + "\")");
        }
        else
        {
            queryStringBuilder.Append("(" + solr_field + searchTerm.Replace(":", "") + ")");
        }
    }

    // Use this built query to query against Solr
    Solr_Documents_Searcher.Search(Current_Aggregation, queryStringBuilder.ToString(), Results_Per_Page, Current_Page, (ushort)Current_Sort, Tracer, out Complete_Result_Set_Info, out Paged_Results);
}

/// <summary> Maps a web field code to the corresponding Solr field prefix ( including the trailing colon ) </summary>
/// <param name="WebField"> Web field code, with any joiner prefix already removed </param>
/// <returns> Solr field prefix such as "fulltext:", or an empty string if the code is unrecognized </returns>
private static string Solr_Field_For_Web_Field(string WebField)
{
    // "TX" is the special full-text search code
    if (WebField == "TX")
        return "fulltext:";

    Metadata_Search_Field field = Engine_ApplicationCache_Gateway.Settings.Metadata_Search_Field_By_Code(WebField.ToUpper());
    return (field != null) ? (field.Solr_Field + ":") : String.Empty;
}
/// <summary> Get a browse of all items in a user's folder </summary>
/// <param name="UserID"> Primary key for this user in the database </param>
/// <param name="FolderName"> Name of this user's folder </param>
/// <param name="ResultsPerPage"> Number of results to return per "page" of results </param>
/// <param name="ResultsPage"> Which page of results to return ( one-based, so the first page is page number of one )</param>
/// <param name="IncludeFacets"> Flag indicates if facets should be included in the final result set</param>
/// <param name="FacetTypes"> Primary key for the metadata types to include as facets (up to eight)</param>
/// <param name="ReturnSearchStatistics"> Flag indicates whether to create and return statistics about the overall search results, generally set to TRUE for the first page requested and subsequently set to FALSE </param>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering</param>
/// <returns> List of items matching search </returns>
/// <remarks> This calls the 'mySobek_Get_User_Folder_Browse' stored procedure</remarks>
public static Single_Paged_Results_Args Get_User_Folder_Browse(int UserID, string FolderName, int ResultsPerPage, int ResultsPage, bool IncludeFacets, List<short> FacetTypes, bool ReturnSearchStatistics, Custom_Tracer Tracer)
{
    if (Tracer != null)
    {
        Tracer.Add_Trace("Engine_Database.Get_User_Folder_Browse", String.Empty);
    }

    // Build the parameters
    List<EalDbParameter> parameters = new List<EalDbParameter>
    {
        new EalDbParameter("@userid", UserID),
        new EalDbParameter("@foldername", FolderName),
        new EalDbParameter("@pagesize", ResultsPerPage),
        new EalDbParameter("@pagenumber", ResultsPage),
        new EalDbParameter("@include_facets", IncludeFacets)
    };

    // Add the eight facet type parameters; unused slots are passed as -1
    bool useFacets = (IncludeFacets) && (FacetTypes != null);
    for (int i = 0; i < 8; i++)
    {
        string parameterName = "@facettype" + (i + 1);
        if ((useFacets) && (FacetTypes.Count > i))
            parameters.Add(new EalDbParameter(parameterName, FacetTypes[i]));
        else
            parameters.Add(new EalDbParameter(parameterName, -1));
    }

    // Add parameters for total items and total titles
    EalDbParameter totalItemsParameter = new EalDbParameter("@total_items", 0) { Direction = ParameterDirection.InputOutput };
    parameters.Add(totalItemsParameter);
    EalDbParameter totalTitlesParameter = new EalDbParameter("@total_titles", 0) { Direction = ParameterDirection.InputOutput };
    parameters.Add(totalTitlesParameter);

    // Create the database agnostic reader
    EalDbReaderWrapper readerWrapper = EalDbAccess.ExecuteDataReader(DatabaseType, Connection_String + "Connection Timeout=45", CommandType.StoredProcedure, "mySobek_Get_User_Folder_Browse", parameters);

    Single_Paged_Results_Args returnArgs;
    Search_Results_Statistics stats = null;
    try
    {
        // Pull out the database reader and build the single page of results
        DbDataReader reader = readerWrapper.Reader;
        List<string> metadataLabels = new List<string>();
        returnArgs = new Single_Paged_Results_Args { Paged_Results = DataReader_To_Simple_Result_List2(reader, metadataLabels) };

        // Create the overall search statistics?
        if (ReturnSearchStatistics)
        {
            stats = new Search_Results_Statistics(reader, FacetTypes, metadataLabels);
            returnArgs.Statistics = stats;
        }
    }
    finally
    {
        // Close the reader (which also closes the connection) even when an
        // exception occurs mid-read; previously a throw here leaked the connection
        readerWrapper.Close();
    }

    // Output parameter values are only reliable after the reader has been closed
    if (stats != null)
    {
        stats.Total_Items = Convert.ToInt32(totalItemsParameter.Value);
        stats.Total_Titles = Convert.ToInt32(totalTitlesParameter.Value);
    }

    // Return the built result arguments
    return returnArgs;
}
/// <summary> Performs geographic search for items within provided rectangular bounding box and linked to item aggregation of interest </summary>
/// <param name="AggregationCode"> Code for the item aggregation of interest </param>
/// <param name="Latitude1"> Latitudinal portion of the first point making up the rectangular bounding box</param>
/// <param name="Longitude1"> Longitudinal portion of the first point making up the rectangular bounding box</param>
/// <param name="Latitude2"> Latitudinal portion of the second point making up the rectangular bounding box</param>
/// <param name="Longitude2"> Longitudinal portion of the second point making up the rectangular bounding box</param>
/// <param name="IncludePrivateItems"> Flag indicates whether to include private items in the result set </param>
/// <param name="ResultsPerPage"> Number of results to return per "page" of results </param>
/// <param name="ResultsPage"> Which page of results to return ( one-based, so the first page is page number of one )</param>
/// <param name="Sort"> Current sort to use ( 0 = default by search or browse, 1 = title, 10 = date asc, 11 = date desc )</param>
/// <param name="IncludeFacets"> Flag indicates if facets should be included in the result set </param>
/// <param name="FacetTypes"> Primary key for the metadata types to include as facets (up to eight)</param>
/// <param name="ReturnSearchStatistics"> Flag indicates whether to create and return statistics about the overall search results, generally set to TRUE for the first page requested and subsequently set to FALSE </param>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering</param>
/// <returns> Table with all of the item and item group information within provided bounding box </returns>
/// <remarks> This calls the 'SobekCM_Get_Items_By_Coordinates' stored procedure </remarks>
public static Multiple_Paged_Results_Args Get_Items_By_Coordinates(string AggregationCode, double Latitude1, double Longitude1, double Latitude2, double Longitude2, bool IncludePrivateItems, int ResultsPerPage, int ResultsPage, int Sort, bool IncludeFacets, List<short> FacetTypes, bool ReturnSearchStatistics, Custom_Tracer Tracer)
{
    if (Tracer != null)
    {
        Tracer.Add_Trace("Engine_Database.Get_Items_By_Coordinates", "Pulling data from database");
    }

    // Build the parameters
    List<EalDbParameter> parameters = new List<EalDbParameter>();
    parameters.Add(new EalDbParameter("@lat1", Latitude1));
    parameters.Add(new EalDbParameter("@long1", Longitude1));

    // When both points are identical this is a point search, and the
    // second point is passed to the stored procedure as NULL
    if ((Latitude1 == Latitude2) && (Longitude1 == Longitude2))
    {
        parameters.Add(new EalDbParameter("@lat2", DBNull.Value));
        parameters.Add(new EalDbParameter("@long2", DBNull.Value));
    }
    else
    {
        parameters.Add(new EalDbParameter("@lat2", Latitude2));
        parameters.Add(new EalDbParameter("@long2", Longitude2));
    }

    parameters.Add(new EalDbParameter("@include_private", IncludePrivateItems));
    parameters.Add(new EalDbParameter("@aggregationcode", AggregationCode));
    parameters.Add(new EalDbParameter("@pagesize", ResultsPerPage));
    parameters.Add(new EalDbParameter("@pagenumber", ResultsPage));
    parameters.Add(new EalDbParameter("@sort", Sort));
    parameters.Add(new EalDbParameter("@minpagelookahead", MIN_PAGE_LOOKAHEAD));
    parameters.Add(new EalDbParameter("@maxpagelookahead", MAX_PAGE_LOOKAHEAD));
    parameters.Add(new EalDbParameter("@lookahead_factor", LOOKAHEAD_FACTOR));
    parameters.Add(new EalDbParameter("@include_facets", IncludeFacets));

    // Add the eight facet type parameters; facets are only requested alongside
    // statistics, and unused slots are passed as -1
    bool useFacets = (IncludeFacets) && (FacetTypes != null) && (ReturnSearchStatistics);
    for (int i = 0; i < 8; i++)
    {
        string parameterName = "@facettype" + (i + 1);
        if ((useFacets) && (FacetTypes.Count > i))
            parameters.Add(new EalDbParameter(parameterName, FacetTypes[i]));
        else
            parameters.Add(new EalDbParameter(parameterName, -1));
    }

    // Add parameters for total items and total titles
    EalDbParameter totalItemsParameter = new EalDbParameter("@total_items", 0) { Direction = ParameterDirection.InputOutput };
    parameters.Add(totalItemsParameter);
    EalDbParameter totalTitlesParameter = new EalDbParameter("@total_titles", 0) { Direction = ParameterDirection.InputOutput };
    parameters.Add(totalTitlesParameter);

    // Create the database agnostic reader
    EalDbReaderWrapper readerWrapper = EalDbAccess.ExecuteDataReader(DatabaseType, Connection_String + "Connection Timeout=45", CommandType.StoredProcedure, "SobekCM_Get_Items_By_Coordinates", parameters);

    Multiple_Paged_Results_Args returnArgs;
    Search_Results_Statistics stats = null;
    try
    {
        // Pull out the database reader and build the paged results
        DbDataReader reader = readerWrapper.Reader;
        List<string> metadataFields = new List<string>();
        returnArgs = new Multiple_Paged_Results_Args { Paged_Results = DataReader_To_Result_List_With_LookAhead2(reader, ResultsPerPage, metadataFields) };

        // Create the overall search statistics?
        if (ReturnSearchStatistics)
        {
            stats = new Search_Results_Statistics(reader, FacetTypes, metadataFields);
            returnArgs.Statistics = stats;
        }
    }
    finally
    {
        // Close the reader (which also closes the connection) even when an
        // exception occurs mid-read; previously a throw here leaked the connection
        readerWrapper.Close();
    }

    // Output parameter values are only reliable after the reader has been closed
    if (stats != null)
    {
        stats.Total_Items = Convert.ToInt32(totalItemsParameter.Value);
        stats.Total_Titles = Convert.ToInt32(totalTitlesParameter.Value);
    }

    // Return the built result arguments
    return returnArgs;
}
/// <summary> Perform a search for documents with matching parameters </summary>
/// <param name="AggregationCode"> Aggregation code within which to search </param>
/// <param name="QueryString"> Query string for the actual search to perform against the Solr/Lucene engine </param>
/// <param name="ResultsPerPage"> Number of results to display per a "page" of results </param>
/// <param name="Page_Number"> Which page of results to return ( one-based, so the first page is page number of one )</param>
/// <param name="Sort"> Sort to apply before returning the results of the search </param>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering </param>
/// <param name="Complete_Result_Set_Info"> [OUT] Information about the entire set of results </param>
/// <param name="Paged_Results"> [OUT] List of search results for the requested page of results </param>
/// <returns> TRUE if the search succeeded; FALSE if any exception occurred ( the failure is traced, not rethrown ) </returns>
public static bool Search(string AggregationCode, string QueryString, int ResultsPerPage, int Page_Number, ushort Sort, Custom_Tracer Tracer, out Search_Results_Statistics Complete_Result_Set_Info, out List<iSearch_Title_Result> Paged_Results)
{
    if (Tracer != null)
    {
        Tracer.Add_Trace("Solr_Documents_Searcher.Search", String.Empty);
    }

    // Set output initially to empty/null
    Paged_Results = new List<iSearch_Title_Result>();
    Complete_Result_Set_Info = null;

    try
    {
        // Ensure page is not erroneously set to zero or negative
        if (Page_Number <= 0)
            Page_Number = 1;

        // Create the solr worker to query the document index
        var solrWorker = Solr_Operations_Cache<Solr_Document_Result>.GetSolrOperations(Engine_ApplicationCache_Gateway.Settings.Servers.Document_Solr_Index_URL);

        // Create the query options
        QueryOptions options = new QueryOptions
        {
            Rows = ResultsPerPage,
            Start = (Page_Number - 1) * ResultsPerPage,
            Fields = new[] { "did", "score", "url", "aleph", "donor", "edition", "format", "holdinglocation", "sourceinstitution", "maintitle", "materialtype", "oclc", "pubdate_display", "author_display", "publisher_display", "mainthumbnail" },
            Highlight = new HighlightingParameters
            {
                Fields = new[] { "fulltext" },
            },
            ExtraParams = new Dictionary<string, string> { { "hl.useFastVectorHighlighter", "true" } }
        };

        // Set the sort value ( 0 leaves the default relevance ordering in place )
        if (Sort != 0)
        {
            options.OrderBy.Clear();
            switch (Sort)
            {
                case 1:
                    options.OrderBy.Add(new SortOrder("maintitle_sort"));
                    break;

                case 2:
                    options.OrderBy.Add(new SortOrder("bibid", Order.ASC));
                    break;

                case 3:
                    options.OrderBy.Add(new SortOrder("bibid", Order.DESC));
                    break;

                case 10:
                    options.OrderBy.Add(new SortOrder("pubdate", Order.ASC));
                    break;

                case 11:
                    options.OrderBy.Add(new SortOrder("pubdate", Order.DESC));
                    break;
            }
        }

        // If there was an aggregation code included, put that at the beginning of the search
        // ( null-safe check: a null aggregation code previously threw NullReferenceException )
        if ((!String.IsNullOrEmpty(AggregationCode)) && (AggregationCode.ToUpper() != "ALL"))
        {
            QueryString = "(aggregation_code:" + AggregationCode.ToUpper() + ")AND(" + QueryString + ")";
        }

        // Perform this search
        SolrQueryResults<Solr_Document_Result> results = solrWorker.Query(QueryString, options);

        // Create the search statistics
        List<string> metadataLabels = new List<string> { "Author", "Publisher", "Format", "Edition", "Institution", "Donor" };
        Complete_Result_Set_Info = new Search_Results_Statistics(metadataLabels)
        {
            Total_Titles = results.NumFound,
            Total_Items = results.NumFound,
            QueryTime = results.Header.QTime
        };

        // Pass all the results into the List and add the highlighted text to each result as well
        foreach (Solr_Document_Result thisResult in results)
        {
            // Add the highlight snippet, if one was returned for this document
            if ((results.Highlights.ContainsKey(thisResult.DID)) && (results.Highlights[thisResult.DID].Count > 0) && (results.Highlights[thisResult.DID].ElementAt(0).Value.Count > 0))
            {
                thisResult.Snippet = results.Highlights[thisResult.DID].ElementAt(0).Value.ElementAt(0);
            }

            // Add this result
            Paged_Results.Add(thisResult);
        }

        return true;
    }
    catch (Exception ee)
    {
        // Trace the failure instead of swallowing it silently; the contract of
        // returning FALSE on any error is preserved for existing callers
        if (Tracer != null)
        {
            Tracer.Add_Trace("Solr_Documents_Searcher.Search", "Exception caught: " + ee.Message);
        }
        return false;
    }
}
/// <summary> Returns the list of all items/titles which match a given OCLC number </summary>
/// <param name="OclcNumber"> OCLC number to look for matching items </param>
/// <param name="IncludePrivateItems"> Flag indicates whether to include private items in the result set </param>
/// <param name="ResultsPerPage"> Number of results to return per "page" of results </param>
/// <param name="Sort"> Current sort to use ( 0 = default by search or browse, 1 = title, 10 = date asc, 11 = date desc )</param>
/// <param name="ReturnSearchStatistics"> Flag indicates whether to create and return statistics about the overall search results, generally set to TRUE for the first page requested and subsequently set to FALSE </param>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering</param>
/// <returns> Table with all of the item and item group information which matches the OCLC number </returns>
/// <remarks> This calls the 'SobekCM_Items_By_OCLC' stored procedure </remarks>
public static Multiple_Paged_Results_Args Items_By_OCLC_Number(long OclcNumber, bool IncludePrivateItems, int ResultsPerPage, int Sort, bool ReturnSearchStatistics, Custom_Tracer Tracer)
{
    if (Tracer != null)
    {
        Tracer.Add_Trace("Engine_Database.Items_By_OCLC_Number", "Searching by OCLC in the database");
    }

    // Build the parameter list
    EalDbParameter[] paramList = new EalDbParameter[5];
    paramList[0] = new EalDbParameter("@oclc_number", OclcNumber);
    paramList[1] = new EalDbParameter("@include_private", IncludePrivateItems);
    paramList[2] = new EalDbParameter("@sort", Sort);
    paramList[3] = new EalDbParameter("@total_items", 0) { Direction = ParameterDirection.InputOutput };
    paramList[4] = new EalDbParameter("@total_titles", 0) { Direction = ParameterDirection.InputOutput };

    // Create the database agnostic reader
    EalDbReaderWrapper readerWrapper = EalDbAccess.ExecuteDataReader(DatabaseType, Connection_String, CommandType.StoredProcedure, "SobekCM_Items_By_OCLC", paramList);

    Multiple_Paged_Results_Args returnArgs;
    Search_Results_Statistics stats = null;
    try
    {
        // Pull out the database reader and build the paged results
        DbDataReader reader = readerWrapper.Reader;
        List<string> metadataFields = new List<string>();
        returnArgs = new Multiple_Paged_Results_Args { Paged_Results = DataReader_To_Result_List_With_LookAhead2(reader, ResultsPerPage, metadataFields) };

        // Create the overall search statistics? ( no facets for OCLC lookups )
        if (ReturnSearchStatistics)
        {
            stats = new Search_Results_Statistics(reader, null, metadataFields);
            returnArgs.Statistics = stats;
        }
    }
    finally
    {
        // Close the reader (which also closes the connection) even when an
        // exception occurs mid-read; previously a throw here leaked the connection
        readerWrapper.Close();
    }

    // Output parameter values are only reliable after the reader has been closed
    if (stats != null)
    {
        stats.Total_Items = Convert.ToInt32(paramList[3].Value);
        stats.Total_Titles = Convert.ToInt32(paramList[4].Value);
    }

    // Return the built results
    return returnArgs;
}
/// <summary> Constructor for a new instance of the Single_Paged_Results_Args class </summary>
/// <param name="Statistics"> Statistics/information about the overall search or browse, including initial query time, complete results counts, and facets </param>
/// <param name="Paged_Results"> Single page of results, as a collection of search title results </param>
public Single_Paged_Results_Args(Search_Results_Statistics Statistics, List<iSearch_Title_Result> Paged_Results)
{
    // Store the provided page of results and the overall statistics
    this.Paged_Results = Paged_Results;
    this.Statistics = Statistics;
}
/// <summary> Performs a basic metadata search over the entire citation, given a search condition, and returns one page of results </summary>
/// <param name="SearchCondition"> Search condition string to be run against the database </param>
/// <param name="IncludePrivateItems"> Flag indicates whether to include private items in the result set </param>
/// <param name="AggregationCode"> Code for the aggregation of interest ( or empty string to search all aggregationPermissions )</param>
/// <param name="DateRangeStart"> If this search includes a date range search, start of the date range, or -1</param>
/// <param name="DateRangeEnd"> If this search includes a date range search, end of the date range, or -1</param>
/// <param name="ResultsPerPage"> Number of results to return per "page" of results </param>
/// <param name="ResultsPage"> Which page of results to return ( one-based, so the first page is page number of one )</param>
/// <param name="Sort"> Current sort to use ( 0 = default by search or browse, 1 = title, 10 = date asc, 11 = date desc )</param>
/// <param name="IncludeFacets"> Flag indicates whether to include facets in the result set </param>
/// <param name="FacetTypes"> Primary key for the metadata types to include as facets (up to eight)</param>
/// <param name="ReturnSearchStatistics"> Flag indicates whether to create and return statistics about the overall search results, generally set to TRUE for the first page requested and subsequently set to FALSE </param>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering</param>
/// <returns> Small arguments object which contains the page of results and optionally statistics about results for the entire search, including complete counts and facet information </returns>
/// <remarks> This calls the 'SobekCM_Metadata_Basic_Search_Paged2' stored procedure </remarks>
public static Multiple_Paged_Results_Args Perform_Metadata_Search_Paged(string SearchCondition, bool IncludePrivateItems, string AggregationCode, long DateRangeStart, long DateRangeEnd, int ResultsPerPage, int ResultsPage, int Sort, bool IncludeFacets, List<short> FacetTypes, bool ReturnSearchStatistics, Custom_Tracer Tracer)
{
    if (Tracer != null)
    {
        Tracer.Add_Trace("Engine_Database.Perform_Basic_Search_Paged", "Performing basic search in database ( stored procedure SobekCM_Metadata_Basic_Search_Paged2 )");
    }

    // "ALL" is the sentinel for searching every aggregation
    if (AggregationCode.ToUpper() == "ALL")
        AggregationCode = String.Empty;

    // Build the list of parameters ( doubled apostrophes are collapsed back to singles )
    List<EalDbParameter> parameters = new List<EalDbParameter>
    {
        new EalDbParameter("@searchcondition", SearchCondition.Replace("''", "'")),
        new EalDbParameter("@include_private", IncludePrivateItems),
        new EalDbParameter("@aggregationcode", AggregationCode),
        new EalDbParameter("@daterange_start", DateRangeStart),
        new EalDbParameter("@daterange_end", DateRangeEnd),
        new EalDbParameter("@pagesize", ResultsPerPage),
        new EalDbParameter("@pagenumber", ResultsPage),
        new EalDbParameter("@sort", Sort)
    };

    // If this is for more than 100 results, don't look ahead
    if (ResultsPerPage > 100)
    {
        parameters.Add(new EalDbParameter("@minpagelookahead", 1));
        parameters.Add(new EalDbParameter("@maxpagelookahead", 1));
        parameters.Add(new EalDbParameter("@lookahead_factor", LOOKAHEAD_FACTOR));
    }
    else
    {
        parameters.Add(new EalDbParameter("@minpagelookahead", MIN_PAGE_LOOKAHEAD));
        parameters.Add(new EalDbParameter("@maxpagelookahead", MAX_PAGE_LOOKAHEAD));
        parameters.Add(new EalDbParameter("@lookahead_factor", LOOKAHEAD_FACTOR));
    }

    // Facets are only requested alongside statistics; unused slots are passed as -1
    bool useFacets = (IncludeFacets) && (FacetTypes != null) && (FacetTypes.Count > 0) && (ReturnSearchStatistics);
    parameters.Add(new EalDbParameter("@include_facets", useFacets));
    for (int i = 0; i < 8; i++)
    {
        string parameterName = "@facettype" + (i + 1);
        if ((useFacets) && (FacetTypes.Count > i))
            parameters.Add(new EalDbParameter(parameterName, FacetTypes[i]));
        else
            parameters.Add(new EalDbParameter(parameterName, -1));
    }

    // Add parameters for total items and total titles
    EalDbParameter totalItemsParameter = new EalDbParameter("@total_items", 0) { Direction = ParameterDirection.InputOutput };
    parameters.Add(totalItemsParameter);
    EalDbParameter totalTitlesParameter = new EalDbParameter("@total_titles", 0) { Direction = ParameterDirection.InputOutput };
    parameters.Add(totalTitlesParameter);

    // Add parameters for items and titles if this search is expanded to include all aggregationPermissions
    EalDbParameter expandedItemsParameter = new EalDbParameter("@all_collections_items", 0) { Direction = ParameterDirection.InputOutput };
    parameters.Add(expandedItemsParameter);
    EalDbParameter expandedTitlesParameter = new EalDbParameter("@all_collections_titles", 0) { Direction = ParameterDirection.InputOutput };
    parameters.Add(expandedTitlesParameter);

    // Create the database agnostic reader
    EalDbReaderWrapper readerWrapper = EalDbAccess.ExecuteDataReader(DatabaseType, Connection_String + "Connection Timeout=45", CommandType.StoredProcedure, "SobekCM_Metadata_Basic_Search_Paged2", parameters);

    Multiple_Paged_Results_Args returnArgs;
    Search_Results_Statistics stats = null;
    try
    {
        // Pull out the database reader and build the paged results
        DbDataReader reader = readerWrapper.Reader;
        List<string> metadataLabels = new List<string>();
        returnArgs = new Multiple_Paged_Results_Args { Paged_Results = DataReader_To_Result_List_With_LookAhead2(reader, ResultsPerPage, metadataLabels) };

        // Create the overall search statistics?
        if (ReturnSearchStatistics)
        {
            stats = new Search_Results_Statistics(reader, FacetTypes, metadataLabels);
            returnArgs.Statistics = stats;
        }
    }
    finally
    {
        // Close the reader (which also closes the connection) even when an
        // exception occurs mid-read; previously a throw here leaked the connection
        readerWrapper.Close();
    }

    if (stats != null)
    {
        // Output parameter values are only reliable after the reader has been closed
        stats.Total_Items = Convert.ToInt32(totalItemsParameter.Value);
        stats.Total_Titles = Convert.ToInt32(totalTitlesParameter.Value);

        int allItems = Convert.ToInt32(expandedItemsParameter.Value);
        int allTitles = Convert.ToInt32(expandedTitlesParameter.Value);
        if (allItems > 0)
            stats.All_Collections_Items = allItems;
        if (allTitles > 0)
            stats.All_Collections_Titles = allTitles;

        // Assign the display term for each facet collection; the null guard avoids a
        // NullReferenceException when a facet's metadata type is no longer configured
        foreach (Search_Facet_Collection thisFacet in stats.Facet_Collections)
        {
            Metadata_Search_Field field = Engine_ApplicationCache_Gateway.Settings.Metadata_Search_Field_By_ID(thisFacet.MetadataTypeID);
            if (field != null)
                thisFacet.MetadataTerm = field.Facet_Term;
        }
    }

    // Return the built result arguments
    return returnArgs;
}
/// <summary> Converts the web field codes / terms into database search arguments and performs the metadata search against the database </summary>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering </param>
/// <param name="Terms"> List of search terms, parallel to <paramref name="Web_Fields"/> </param>
/// <param name="Web_Fields"> List of web field codes; each may be prefixed with a joiner character ( '+' = AND, '=' = OR, '-' = AND NOT ) </param>
/// <param name="Date1"> Start of a date range search, or -1 </param>
/// <param name="Date2"> End of a date range search, or -1 </param>
/// <param name="ActualCount"> Number of term/field pairs actually present in the two lists </param>
/// <param name="Current_Mode"> Mode / navigation information for the current request, including search precision and page </param>
/// <param name="Current_Sort"> Current sort to apply to the results </param>
/// <param name="Aggregation_Object"> Current item aggregation, used for the list of facets to request </param>
/// <param name="Results_Per_Page"> Number of results to return per "page" of results </param>
/// <param name="Potentially_Include_Facets"> Flag indicates if facets should potentially be requested </param>
/// <param name="Complete_Result_Set_Info"> [OUT] Information about the entire set of results ( only populated when <paramref name="Need_Search_Statistics"/> is TRUE ) </param>
/// <param name="Paged_Results"> [OUT] One or more pages of results from the database lookahead </param>
/// <param name="Need_Search_Statistics"> Flag indicates whether the overall search statistics are needed </param>
private void Perform_Database_Search(Custom_Tracer Tracer, List<string> Terms, List<string> Web_Fields, long Date1, long Date2, int ActualCount, Results_Arguments Current_Mode, int Current_Sort, Complete_Item_Aggregation Aggregation_Object, int Results_Per_Page, bool Potentially_Include_Facets, out Search_Results_Statistics Complete_Result_Set_Info, out List<List<iSearch_Title_Result>> Paged_Results, bool Need_Search_Statistics)
{
    if (Tracer != null)
    {
        Tracer.Add_Trace("SobekCM_Assistant.Perform_Database_Search", "Query the database for search results");
    }

    // Get the list of facets first.  When facets are not wanted a fresh empty list is
    // used instead of clearing the aggregation's own list -- the previous Clear() call
    // mutated whatever list Aggregation_Object.Facets returned, which could wipe the
    // aggregation's facets for all subsequent requests if that property exposes its
    // internal list ( NOTE(review): confirm whether Facets returns a copy )
    List<short> facetsList = Potentially_Include_Facets ? Aggregation_Object.Facets : new List<short>();

    // Set the return values to NULL initially
    Complete_Result_Set_Info = null;

    const bool INCLUDE_PRIVATE = false;

    List<short> links = new List<short>();
    List<short> db_fields = new List<short>();
    List<string> db_terms = Terms.ToList();

    // Step through all the web fields and convert to db fields
    for (int i = 0; i < ActualCount; i++)
    {
        if (Web_Fields[i].Length > 1)
        {
            // Find the joiner ( 0 = AND, 1 = OR, 2 = AND NOT )
            if ((Web_Fields[i][0] == '+') || (Web_Fields[i][0] == '=') || (Web_Fields[i][0] == '-'))
            {
                if (Web_Fields[i][0] == '+')
                    links.Add(0);
                if (Web_Fields[i][0] == '=')
                    links.Add(1);
                if (Web_Fields[i][0] == '-')
                    links.Add(2);
                Web_Fields[i] = Web_Fields[i].Substring(1);
            }
            else
            {
                links.Add(0);
            }

            // Find the db field number
            db_fields.Add(Metadata_Field_Number(Web_Fields[i]));
        }

        // Also add starting and ending quotes to all the valid searches
        if (db_terms[i].Length > 0)
        {
            if ((db_terms[i].IndexOf("\"") < 0) && (db_terms[i].IndexOf(" ") < 0))
            {
                // Since this is a single word, see what type of special codes to include
                switch (Current_Mode.Search_Precision)
                {
                    case Search_Precision_Type_Enum.Contains:
                        db_terms[i] = "\"" + db_terms[i] + "\"";
                        break;

                    case Search_Precision_Type_Enum.Inflectional_Form:
                        // If there are any non-characters, don't use inflectional for this term
                        bool inflectional = db_terms[i].All(Char.IsLetter);
                        if (inflectional)
                        {
                            db_terms[i] = "FORMSOF(inflectional," + db_terms[i] + ")";
                        }
                        else
                        {
                            db_terms[i] = "\"" + db_terms[i] + "\"";
                        }
                        break;

                    case Search_Precision_Type_Enum.Synonmic_Form:
                        // If there are any non-characters, don't use thesaurus for this term
                        bool thesaurus = db_terms[i].All(Char.IsLetter);
                        if (thesaurus)
                        {
                            db_terms[i] = "FORMSOF(thesaurus," + db_terms[i] + ")";
                        }
                        else
                        {
                            db_terms[i] = "\"" + db_terms[i] + "\"";
                        }
                        break;
                }
            }
            else
            {
                // Phrases and already-quoted terms are wrapped in quotes unless exact match
                if (Current_Mode.Search_Precision != Search_Precision_Type_Enum.Exact_Match)
                {
                    db_terms[i] = "\"" + db_terms[i] + "\"";
                }
            }
        }
    }

    // Get the page count in the results
    int current_page_index = Current_Mode.Page;

    // If this is an exact match, just do the search on the first term/field pair
    if (Current_Mode.Search_Precision == Search_Precision_Type_Enum.Exact_Match)
    {
        Multiple_Paged_Results_Args returnArgs = Engine_Database.Perform_Metadata_Exact_Search_Paged(db_terms[0], db_fields[0], INCLUDE_PRIVATE, Current_Mode.Aggregation, Date1, Date2, Results_Per_Page, current_page_index, Current_Sort, Need_Search_Statistics, facetsList, Need_Search_Statistics, Tracer);
        if (Need_Search_Statistics)
            Complete_Result_Set_Info = returnArgs.Statistics;
        Paged_Results = returnArgs.Paged_Results;
    }
    else
    {
        // Finish filling up the fields and links to the ten slots the procedures expect
        while (links.Count < 10)
            links.Add(0);
        while (db_fields.Count < 10)
            db_fields.Add(-1);
        while (db_terms.Count < 10)
            db_terms.Add(String.Empty);

        // See if this is a simple search, which can use a more optimized search routine
        bool simplified_search = db_fields.All(Field => (Field <= 0));

        // Perform either the simpler metadata search, or the more complex
        if (simplified_search)
        {
            // Collapse the terms and joiners into a single search condition string
            StringBuilder searchBuilder = new StringBuilder();
            for (int i = 0; i < db_terms.Count; i++)
            {
                if (db_terms[i].Length > 0)
                {
                    if (i > 0)
                    {
                        if (i > links.Count)
                        {
                            searchBuilder.Append(" AND ");
                        }
                        else
                        {
                            switch (links[i - 1])
                            {
                                case 0:
                                    searchBuilder.Append(" AND ");
                                    break;
                                case 1:
                                    searchBuilder.Append(" OR ");
                                    break;
                                case 2:
                                    searchBuilder.Append(" AND NOT ");
                                    break;
                            }
                        }
                    }
                    searchBuilder.Append(db_terms[i]);
                }
            }

            Multiple_Paged_Results_Args returnArgs = Engine_Database.Perform_Metadata_Search_Paged(searchBuilder.ToString(), INCLUDE_PRIVATE, Current_Mode.Aggregation, Date1, Date2, Results_Per_Page, current_page_index, Current_Sort, Need_Search_Statistics, facetsList, Need_Search_Statistics, Tracer);
            if (Need_Search_Statistics)
                Complete_Result_Set_Info = returnArgs.Statistics;
            Paged_Results = returnArgs.Paged_Results;
        }
        else
        {
            // Perform search in the database, passing all ten link/term/field triples
            Multiple_Paged_Results_Args returnArgs = Engine_Database.Perform_Metadata_Search_Paged(links[0], db_terms[0], db_fields[0], links[1], db_terms[1], db_fields[1], links[2], db_terms[2], db_fields[2], links[3], db_terms[3], db_fields[3], links[4], db_terms[4], db_fields[4], links[5], db_terms[5], db_fields[5], links[6], db_terms[6], db_fields[6], links[7], db_terms[7], db_fields[7], links[8], db_terms[8], db_fields[8], links[9], db_terms[9], db_fields[9], INCLUDE_PRIVATE, Current_Mode.Aggregation, Date1, Date2, Results_Per_Page, current_page_index, Current_Sort, Need_Search_Statistics, facetsList, Need_Search_Statistics, Tracer);
            if (Need_Search_Statistics)
                Complete_Result_Set_Info = returnArgs.Statistics;
            Paged_Results = returnArgs.Paged_Results;
        }
    }
}
/// <summary> Gets the collection of all (public) items in the library </summary>
/// <param name="SinceDate"> Date from which to pull the data (passed as @date to the stored procedure, or DBNull if empty) </param>
/// <param name="IncludePrivateItems"> Flag indicates whether to include private items in the result set </param>
/// <param name="ResultsPerPage"> Number of results to return per "page" of results </param>
/// <param name="ResultsPage"> Which page of results to return ( one-based, so the first page is page number of one )</param>
/// <param name="Sort"> Current sort to use ( 0 = default by search or browse, 1 = title, 10 = date asc, 11 = date desc )</param>
/// <param name="IncludeFacets"> Flag indicates if facets should be included in the final result set</param>
/// <param name="FacetTypes"> Primary key for the metadata types to include as facets (up to eight)</param>
/// <param name="ReturnSearchStatistics"> Flag indicates whether to create and return statistics about the overall search results, generally set to TRUE for the first page requested and subsequently set to FALSE </param>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering</param>
/// <returns> Paged results (and optionally the overall statistics) for the browse </returns>
/// <remarks> This calls the 'SobekCM_Get_All_Browse_Paged2' stored procedure </remarks>
public static Multiple_Paged_Results_Args Get_All_Browse_Paged(string SinceDate, bool IncludePrivateItems, int ResultsPerPage, int ResultsPage, int Sort, bool IncludeFacets, List<short> FacetTypes, bool ReturnSearchStatistics, Custom_Tracer Tracer)
{
    if (Tracer != null)
    {
        Tracer.Add_Trace("Engine_Database.Get_All_Browse_Paged", "Pulling browse from database");
    }

    // Create the parameter list.  An empty SinceDate string means "no date restriction",
    // which the stored procedure expects as a NULL @date parameter.
    List<EalDbParameter> parameters = new List<EalDbParameter>
    {
        SinceDate.Length > 0 ? new EalDbParameter("@date", SinceDate) : new EalDbParameter("@date", DBNull.Value),
        new EalDbParameter("@include_private", IncludePrivateItems),
        new EalDbParameter("@pagesize", ResultsPerPage),
        new EalDbParameter("@pagenumber", ResultsPage),
        new EalDbParameter("@sort", Sort),
        new EalDbParameter("@minpagelookahead", MIN_PAGE_LOOKAHEAD),
        new EalDbParameter("@maxpagelookahead", MAX_PAGE_LOOKAHEAD),
        new EalDbParameter("@lookahead_factor", LOOKAHEAD_FACTOR),
        new EalDbParameter("@include_facets", IncludeFacets)
    };

    // The procedure always expects eight facet-type parameters; any slot not requested
    // (or all slots, when faceting is off) is filled with -1 as the "unused" sentinel.
    if ((IncludeFacets) && (FacetTypes != null))
    {
        parameters.Add(FacetTypes.Count > 0 ? new EalDbParameter("@facettype1", FacetTypes[0]) : new EalDbParameter("@facettype1", -1));
        parameters.Add(FacetTypes.Count > 1 ? new EalDbParameter("@facettype2", FacetTypes[1]) : new EalDbParameter("@facettype2", -1));
        parameters.Add(FacetTypes.Count > 2 ? new EalDbParameter("@facettype3", FacetTypes[2]) : new EalDbParameter("@facettype3", -1));
        parameters.Add(FacetTypes.Count > 3 ? new EalDbParameter("@facettype4", FacetTypes[3]) : new EalDbParameter("@facettype4", -1));
        parameters.Add(FacetTypes.Count > 4 ? new EalDbParameter("@facettype5", FacetTypes[4]) : new EalDbParameter("@facettype5", -1));
        parameters.Add(FacetTypes.Count > 5 ? new EalDbParameter("@facettype6", FacetTypes[5]) : new EalDbParameter("@facettype6", -1));
        parameters.Add(FacetTypes.Count > 6 ? new EalDbParameter("@facettype7", FacetTypes[6]) : new EalDbParameter("@facettype7", -1));
        parameters.Add(FacetTypes.Count > 7 ? new EalDbParameter("@facettype8", FacetTypes[7]) : new EalDbParameter("@facettype8", -1));
    }
    else
    {
        parameters.Add(new EalDbParameter("@facettype1", -1));
        parameters.Add(new EalDbParameter("@facettype2", -1));
        parameters.Add(new EalDbParameter("@facettype3", -1));
        parameters.Add(new EalDbParameter("@facettype4", -1));
        parameters.Add(new EalDbParameter("@facettype5", -1));
        parameters.Add(new EalDbParameter("@facettype6", -1));
        parameters.Add(new EalDbParameter("@facettype7", -1));
        parameters.Add(new EalDbParameter("@facettype8", -1));
    }

    parameters.Add(new EalDbParameter("@item_count_to_use_cached", 1000));

    // Add parameters for total items and total titles.  These are InputOutput so
    // the stored procedure can write the overall counts back to us.
    EalDbParameter totalItemsParameter = new EalDbParameter("@total_items", 0) {Direction = ParameterDirection.InputOutput};
    parameters.Add(totalItemsParameter);

    EalDbParameter totalTitlesParameter = new EalDbParameter("@total_titles", 0) {Direction = ParameterDirection.InputOutput};
    parameters.Add(totalTitlesParameter);

    Multiple_Paged_Results_Args returnArgs;
    try
    {
        // Create the database agnostic reader (note the lengthened 45-second command timeout)
        EalDbReaderWrapper readerWrapper = EalDbAccess.ExecuteDataReader(DatabaseType, Connection_String + ";Connection Timeout=45", CommandType.StoredProcedure, "SobekCM_Get_All_Browse_Paged2", parameters);

        // Pull out the database reader
        DbDataReader reader = readerWrapper.Reader;

        // Create the return argument object; metadataLabels is filled in by the
        // lookahead reader and consumed by the statistics constructor below.
        List<string> metadataLabels = new List<string>();
        returnArgs = new Multiple_Paged_Results_Args { Paged_Results = DataReader_To_Result_List_With_LookAhead2(reader, ResultsPerPage, metadataLabels) };

        // Create the overall search statistics?
        if (ReturnSearchStatistics)
        {
            Search_Results_Statistics stats = new Search_Results_Statistics(reader, FacetTypes, metadataLabels);
            returnArgs.Statistics = stats;

            // The output parameters are only populated once the reader is closed,
            // so close first and only then read the total counts.
            readerWrapper.Close();
            stats.Total_Items = Convert.ToInt32(totalItemsParameter.Value);
            stats.Total_Titles = Convert.ToInt32(totalTitlesParameter.Value);
        }
        else
        {
            // Close the reader (which also closes the connection)
            readerWrapper.Close();
        }
    }
    catch (Exception ee)
    {
        // Record the exception for later inspection, trace it, and rethrow
        // (bare 'throw' preserves the original stack trace)
        Last_Exception = ee;
        if (Tracer != null)
        {
            Tracer.Add_Trace("Engine_Database.Get_All_Browse_Paged", "Exception caught during database work", Custom_Trace_Type_Enum.Error);
            Tracer.Add_Trace("Engine_Database.Get_All_Browse_Paged", ee.Message, Custom_Trace_Type_Enum.Error);
            Tracer.Add_Trace("Engine_Database.Get_All_Browse_Paged", ee.StackTrace, Custom_Trace_Type_Enum.Error);
        }
        throw;
    }

    // Return the built result arguments
    return returnArgs;
}
/// <summary> Writes the search or browse information in JSON format directly to the output stream </summary>
/// <param name="Output"> Stream to which to write the JSON search or browse information </param>
/// <param name="Args"> Arguments for the current search/browse (currently unused by this legacy writer, retained for signature compatibility) </param>
/// <param name="ResultsStats"> Statistics about the overall result set; nothing is written if null or empty </param>
/// <param name="ResultsPage"> Single page of title results to serialize </param>
protected internal void legacy_json_display_search_results(TextWriter Output, Results_Arguments Args, Search_Results_Statistics ResultsStats, List<iSearch_Title_Result> ResultsPage)
{
    // If results are null, or no results, return empty string
    if ((ResultsPage == null) || (ResultsStats == null) || (ResultsStats.Total_Items <= 0))
        return;

    // Get the URL and network roots
    string image_url = Engine_ApplicationCache_Gateway.Settings.Servers.Image_URL;
    string base_url = Engine_ApplicationCache_Gateway.Settings.Servers.Base_URL;
    if (HttpContext.Current != null)
    {
        base_url = HttpContext.Current.Request.Url.AbsoluteUri;
        if (base_url.IndexOf("?") > 0)
            base_url = base_url.Substring(0, base_url.IndexOf("?")).Replace("sobekcm.svc", "");
    }

    // Normalize both roots to end with a single trailing slash
    if ((base_url.Length > 0) && (base_url[base_url.Length - 1] != '/'))
        base_url = base_url + "/";
    if ((image_url.Length > 0) && (image_url[image_url.Length - 1] != '/'))
        image_url = image_url + "/";

    Output.Write("[");

    // Step through all the results
    int i = 1;
    foreach (iSearch_Title_Result titleResult in ResultsPage)
    {
        // Always get the first item for things like the main link and thumbnail
        // (assumes BibID is at least 10 characters — TODO confirm upstream guarantee)
        iSearch_Item_Result firstItemResult = titleResult.Get_Item(0);

        // Determine a thumbnail from the pair-tree style image path
        string thumb = image_url + titleResult.BibID.Substring(0, 2) + "/" + titleResult.BibID.Substring(2, 2) + "/" + titleResult.BibID.Substring(4, 2) + "/" + titleResult.BibID.Substring(6, 2) + "/" + titleResult.BibID.Substring(8) + "/" + firstItemResult.VID + "/" + firstItemResult.MainThumbnail;
        if ((thumb.ToUpper().IndexOf(".JPG") < 0) && (thumb.ToUpper().IndexOf(".GIF") < 0))
        {
            thumb = String.Empty;
        }

        // Collapse any doubled slashes, then restore the protocol separator.
        // BUG FIX: the original only restored "http://", so an https image root
        // was emitted broken as "https:/..." — restore "https://" as well.
        thumb = thumb.Replace("\\", "/").Replace("//", "/").Replace("http:/", "http://").Replace("https:/", "https://");

        // Was a previous item/title included here?
        if (i > 1)
            Output.Write(",");

        // NOTE(review): only double-quotes in the title are neutralized (replaced by
        // apostrophes); backslashes or control characters would still produce invalid
        // JSON.  Left as-is since legacy consumers may depend on this exact format.
        Output.Write("{\"collection_item\":{\"name\":\"" + firstItemResult.Title.Trim().Replace("\"", "'") + "\",\"url\":\"" + base_url + titleResult.BibID + "/" + firstItemResult.VID + "\",\"collection_code\":\"\",\"id\":\"" + titleResult.BibID + "_" + firstItemResult.VID + "\",\"thumb_url\":\"" + thumb + "\"}}");
        i++;
    }

    Output.Write("]");
}
/// <summary> Retrieve the (assumed private) user folder browse by user and folder name </summary>
/// <param name="Folder_Name"> Name of the folder to retrieve the browse for </param>
/// <param name="User_ID"> ID for the user </param>
/// <param name="Results_Per_Page"> Number of results to display in this page (set higher if EXPORT is chosen)</param>
/// <param name="ResultsPage">Which page of results to return ( one-based, so the first page is page number of one )</param>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering </param>
/// <param name="Complete_Result_Set_Info"> [OUT] Information about the entire set of results </param>
/// <param name="Paged_Results"> [OUT] List of search results for the requested page of results </param>
/// <returns> TRUE if successful, otherwise FALSE </returns>
/// <remarks> This attempts to pull the objects from the cache.  If unsuccessful, it builds the objects from the
/// database and hands off to the <see cref="CachedDataManager" /> to store in the cache </remarks>
public bool Get_User_Folder(string Folder_Name, int User_ID, int Results_Per_Page, int ResultsPage, Custom_Tracer Tracer, out Search_Results_Statistics Complete_Result_Set_Info, out List<iSearch_Title_Result> Paged_Results)
{
    if (Tracer != null)
    {
        Tracer.Add_Trace("SobekCM_Assistant.Get_User_Folder", String.Empty);
    }

    // Probe the cache for both pieces: the overall browse statistics and the
    // specific page of results requested.
    Complete_Result_Set_Info = CachedDataManager.Retrieve_User_Folder_Browse_Statistics(User_ID, Folder_Name, Tracer);
    Paged_Results = CachedDataManager.Retrieve_User_Folder_Browse(User_ID, Folder_Name, ResultsPage, Results_Per_Page, Tracer);

    bool statisticsCached = (Complete_Result_Set_Info != null);
    bool pageCached = (Paged_Results != null);

    if (statisticsCached && pageCached)
    {
        // Everything was available from the cache; no database work needed
        if (Tracer != null)
        {
            Tracer.Add_Trace("SobekCM_Assistant.Get_User_Folder", "Browse statistics and paged results retrieved from cache");
        }
        return true;
    }

    if (Tracer != null)
    {
        Tracer.Add_Trace("SobekCM_Assistant.Get_User_Folder", "Building results information");
    }

    // At least one piece was missing, so pull from the database (statistics are
    // only computed when they were not already cached)
    Single_Paged_Results_Args dbResults = Engine_Database.Get_User_Folder_Browse(User_ID, Folder_Name, Results_Per_Page, ResultsPage, false, new List<short>(), !statisticsCached, Tracer);
    if (!statisticsCached)
    {
        Complete_Result_Set_Info = dbResults.Statistics;
    }
    Paged_Results = dbResults.Paged_Results;

    // Cache whichever freshly-built pieces came back non-null
    if ((!statisticsCached) && (Complete_Result_Set_Info != null))
    {
        CachedDataManager.Store_User_Folder_Browse_Statistics(User_ID, Folder_Name, Complete_Result_Set_Info, Tracer);
    }
    if ((!pageCached) && (Paged_Results != null))
    {
        CachedDataManager.Store_User_Folder_Browse(User_ID, Folder_Name, ResultsPage, Results_Per_Page, Paged_Results, Tracer);
    }

    return true;
}
/// <summary> Gets the browse or info object and any other needed data for display ( text to display) </summary>
/// <param name="Current_Mode"> Mode / navigation information for the current request</param>
/// <param name="Aggregation_Object"> Item Aggregation object</param>
/// <param name="Base_Directory"> Base directory location under which the CMS/info source file will be found (not referenced in this body — TODO confirm it is still needed)</param>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering </param>
/// <param name="Browse_Object"> [OUT] Stores all the information about this browse or info </param>
/// <param name="Complete_Result_Set_Info"> [OUT] Information about the entire set of results </param>
/// <param name="Paged_Results"> [OUT] List of search results for the requested page of results </param>
/// <param name="Browse_Info_Display_Text"> [OUT] Static HTML-based content to be displayed if this is browsing a statically created html source file </param>
/// <returns> TRUE if successful, otherwise FALSE </returns>
/// <remarks> This attempts to pull the objects from the cache.  If unsuccessful, it builds the objects from the
/// database and hands off to the <see cref="CachedDataManager" /> to store in the cache </remarks>
protected static bool Get_Browse_Info(Navigation_Object Current_Mode, Item_Aggregation Aggregation_Object, string Base_Directory, Custom_Tracer Tracer, out Item_Aggregation_Child_Page Browse_Object, out Search_Results_Statistics Complete_Result_Set_Info, out List<iSearch_Title_Result> Paged_Results, out HTML_Based_Content Browse_Info_Display_Text)
{
    if (Tracer != null)
    {
        Tracer.Add_Trace("abstractHtmlSubwriter.Get_Browse_Info", String.Empty);
    }

    // Set output initially to null
    Paged_Results = null;
    Complete_Result_Set_Info = null;
    Browse_Info_Display_Text = null;

    // First, make sure the browse submode is valid
    Browse_Object = Aggregation_Object.Child_Page_By_Code(Current_Mode.Info_Browse_Mode);
    if (Browse_Object == null)
    {
        Current_Mode.Error_Message = "Unable to retrieve browse/info item '" + Current_Mode.Info_Browse_Mode + "'";
        return false;
    }

    // Is this a table result, or a string?
    switch (Browse_Object.Source_Data_Type)
    {
        case Item_Aggregation_Child_Source_Data_Enum.Database_Table:

            // Resolve the requested sort, defaulting to 1 when none was supplied
            int sort = Current_Mode.Sort.HasValue ? Math.Max(Current_Mode.Sort.Value, ((ushort)1)) : 1;

            // NOTE(review): Math.Max above forces sort >= 1, so this condition can
            // never be true — the original comment said sorts 0 and 1 are equivalent
            // for an ALL browse, so the Max/comparison logic appears to have drifted.
            // Confirm the intended behavior before changing.
            if ((sort == 0) && (Browse_Object.Code == "all"))
                sort = 1;

            // Special code if this is a JSON browse: a separate cache key suffix
            // and a dedicated sort value are used
            string browse_code = Current_Mode.Info_Browse_Mode;
            if (Current_Mode.Writer_Type == Writer_Type_Enum.JSON)
            {
                browse_code = browse_code + "_JSON";
                sort = 12;
            }

            // Get the page count in the results (one-based, minimum of 1)
            int current_page_index = Current_Mode.Page.HasValue ? Math.Max(Current_Mode.Page.Value, ((ushort)1)) : 1;

            // Determine if this is a special search type which returns more rows and is not cached.
            // This is used to return the results as XML and DATASET (export-style output)
            bool special_search_type = false;
            int results_per_page = 20;
            if ((Current_Mode.Writer_Type == Writer_Type_Enum.XML) || (Current_Mode.Writer_Type == Writer_Type_Enum.DataSet))
            {
                results_per_page = 1000000;
                special_search_type = true;
                sort = 2; // Sort by BibID always for these
            }

            // Set the flags for how much data is needed.  (i.e., do we need to pull ANYTHING? or
            // perhaps just the next page of results ( as opposed to pulling facets again).
            bool need_browse_statistics = true;
            bool need_paged_results = true;
            if (!special_search_type)
            {
                // Look to see if the browse statistics are available on any cache for this browse
                Complete_Result_Set_Info = CachedDataManager.Retrieve_Browse_Result_Statistics(Aggregation_Object.Code, browse_code, Tracer);
                if (Complete_Result_Set_Info != null)
                    need_browse_statistics = false;

                // Look to see if the paged results are available on any cache..
                Paged_Results = CachedDataManager.Retrieve_Browse_Results(Aggregation_Object.Code, browse_code, current_page_index, sort, Tracer);
                if (Paged_Results != null)
                    need_paged_results = false;
            }

            // Was a copy found in the cache?
            if ((!need_browse_statistics) && (!need_paged_results))
            {
                if (Tracer != null)
                {
                    Tracer.Add_Trace("SobekCM_Assistant.Get_Browse_Info", "Browse statistics and paged results retrieved from cache");
                }
            }
            else
            {
                if (Tracer != null)
                {
                    Tracer.Add_Trace("SobekCM_Assistant.Get_Browse_Info", "Building results information");
                }

                // Try to pull more than one page, so we can cache the next page or so
                List<List<iSearch_Title_Result>> pagesOfResults;

                // Get from the hierarchy object
                Multiple_Paged_Results_Args returnArgs = Item_Aggregation_Utilities.Get_Browse_Results(Aggregation_Object, Browse_Object, current_page_index, sort, results_per_page, !special_search_type, need_browse_statistics, Tracer);
                if (need_browse_statistics)
                {
                    Complete_Result_Set_Info = returnArgs.Statistics;
                }
                pagesOfResults = returnArgs.Paged_Results;
                if ((pagesOfResults != null) && (pagesOfResults.Count > 0))
                    Paged_Results = pagesOfResults[0];

                // Save the overall result set statistics to the cache if something was pulled
                // (export-style special searches are deliberately never cached)
                if (!special_search_type)
                {
                    if ((need_browse_statistics) && (Complete_Result_Set_Info != null))
                    {
                        CachedDataManager.Store_Browse_Result_Statistics(Aggregation_Object.Code, browse_code, Complete_Result_Set_Info, Tracer);
                    }

                    // Save the overall result set statistics to the cache if something was pulled
                    if ((need_paged_results) && (Paged_Results != null))
                    {
                        CachedDataManager.Store_Browse_Results(Aggregation_Object.Code, browse_code, current_page_index, sort, pagesOfResults, Tracer);
                    }
                }
            }
            break;

        case Item_Aggregation_Child_Source_Data_Enum.Static_HTML:
            // Static HTML child page: fetch the HTML content through the engine client
            Browse_Info_Display_Text = SobekEngineClient.Aggregations.Get_Aggregation_HTML_Child_Page(Aggregation_Object.Code, Aggregation_Object.Language, UI_ApplicationCache_Gateway.Settings.System.Default_UI_Language, Browse_Object.Code, Tracer);
            break;
    }
    return true;
}
/// <summary> Performs a metadata search directly against the database, after first
/// short-circuiting special single-term searches (BibID redirect, OCLC, ALEPH) </summary>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering </param>
/// <param name="Terms"> Search terms, parallel to <paramref name="Web_Fields"/> </param>
/// <param name="Web_Fields"> Web field codes, optionally prefixed with '+', '=' or '-' to indicate AND / OR / AND NOT joining </param>
/// <param name="Date1"> Beginning of a date range search, or -1 — presumably; TODO confirm sentinel against callers </param>
/// <param name="Date2"> End of a date range search </param>
/// <param name="ActualCount"> Number of term/field pairs actually populated </param>
/// <param name="Current_Mode"> Mode / navigation information for the current request </param>
/// <param name="Current_Sort"> Current sort to apply to the results </param>
/// <param name="Aggregation_Object"> Aggregation against which this search is performed </param>
/// <param name="All_Items_Lookup"> Lookup object used to resolve BibID/VID redirects </param>
/// <param name="Results_Per_Page"> Number of results per page </param>
/// <param name="Potentially_Include_Facets"> Flag indicates whether facets may be included </param>
/// <param name="Complete_Result_Set_Info"> [OUT] Information about the entire set of results (only set when <paramref name="Need_Search_Statistics"/>) </param>
/// <param name="Paged_Results"> [OUT] Pages of search results, or null when the request was completed by a redirect </param>
/// <param name="Need_Search_Statistics"> Flag indicates whether overall statistics are needed </param>
private void Perform_Database_Search(Custom_Tracer Tracer, List<string> Terms, List<string> Web_Fields, long Date1, long Date2, int ActualCount, Navigation_Object Current_Mode, int Current_Sort, Item_Aggregation Aggregation_Object, Item_Lookup_Object All_Items_Lookup, int Results_Per_Page, bool Potentially_Include_Facets, out Search_Results_Statistics Complete_Result_Set_Info, out List<List<iSearch_Title_Result>> Paged_Results, bool Need_Search_Statistics)
{
    if (Tracer != null)
    {
        Tracer.Add_Trace("SobekCM_Assistant.Perform_Database_Search", "Query the database for search results");
    }

    // Get the list of facets first
    // NOTE(review): this clears the aggregation object's OWN facet list (no copy is
    // taken), so suppressing facets here mutates shared state — confirm intended.
    List<short> facetsList = Aggregation_Object.Facets;
    if (!Potentially_Include_Facets)
        facetsList.Clear();

    // Set the return values to NULL initially
    Complete_Result_Set_Info = null;
    const bool INCLUDE_PRIVATE = false;

    // Special code for searching by bibid, oclc, or aleph (single-term searches only)
    if (ActualCount == 1)
    {
        // Is this a BIBID search?  (no wildcard, at least BibID-length)
        if ((Web_Fields[0] == "BI") && ( Terms[0].IndexOf("*") < 0 ) && ( Terms[0].Length >= 10 ))
        {
            string bibid = Terms[0].ToUpper();
            string vid = String.Empty;

            // Split a trailing VID off the term; accepted forms are
            // BIBID_VID, BIBID:VID, or a 15-character concatenation
            if (bibid.Length > 10)
            {
                if ((bibid.IndexOf("_") == 10) && ( bibid.Length > 11 ))
                {
                    vid = bibid.Substring(11).PadLeft(5, '0');
                    bibid = bibid.Substring(0, 10);
                }
                else if ((bibid.IndexOf(":") == 10) && ( bibid.Length > 11 ))
                {
                    vid = bibid.Substring(11).PadLeft(5, '0');
                    bibid = bibid.Substring(0, 10);
                }
                else if (bibid.Length == 15)
                {
                    vid = bibid.Substring(10);
                    bibid = bibid.Substring(0, 10);
                }
            }

            if (bibid.Length == 10)
            {
                if (vid.Length == 5)
                {
                    // Exact BibID/VID match: redirect straight to the item rather than searching
                    if (All_Items_Lookup.Contains_BibID_VID(bibid, vid))
                    {
                        string redirect_url = Current_Mode.Base_URL + bibid + "/" + vid;
                        if ( Current_Mode.Writer_Type == Writer_Type_Enum.HTML_LoggedIn )
                            redirect_url = Current_Mode.Base_URL + "l/" + bibid + "/" + vid;
                        // Redirect(url, false) + CompleteRequest avoids the
                        // ThreadAbortException that Response.Redirect(url) would raise
                        HttpContext.Current.Response.Redirect(redirect_url, false);
                        HttpContext.Current.ApplicationInstance.CompleteRequest();
                        Current_Mode.Request_Completed = true;
                        Paged_Results = null;
                        return;
                    }
                }
                else
                {
                    // BibID only: redirect to the title-level page
                    if (All_Items_Lookup.Contains_BibID(bibid))
                    {
                        string redirect_url = Current_Mode.Base_URL + bibid;
                        if (Current_Mode.Writer_Type == Writer_Type_Enum.HTML_LoggedIn)
                            redirect_url = Current_Mode.Base_URL + "l/" + bibid;
                        HttpContext.Current.Response.Redirect(redirect_url, false);
                        HttpContext.Current.ApplicationInstance.CompleteRequest();
                        Current_Mode.Request_Completed = true;
                        Paged_Results = null;
                        return;
                    }
                }
            }
        }

        // Was this a OCLC search?
        if ((Web_Fields[0] == "OC") && (Terms[0].Length > 0))
        {
            bool is_number = Terms[0].All(Char.IsNumber);
            if (is_number)
            {
                long oclc = Convert.ToInt64(Terms[0]);
                Multiple_Paged_Results_Args returnArgs = Engine_Database.Items_By_OCLC_Number(oclc, false, Results_Per_Page, Current_Sort, Need_Search_Statistics, Tracer);
                if (Need_Search_Statistics)
                    Complete_Result_Set_Info = returnArgs.Statistics;
                Paged_Results = returnArgs.Paged_Results;
                return;
            }
        }

        // Was this a ALEPH search?
        if ((Web_Fields[0] == "AL") && (Terms[0].Length > 0))
        {
            bool is_number = Terms[0].All(Char.IsNumber);
            if (is_number)
            {
                int aleph = Convert.ToInt32(Terms[0]);
                Multiple_Paged_Results_Args returnArgs = Engine_Database.Items_By_ALEPH_Number(aleph, false, Results_Per_Page, Current_Sort, Need_Search_Statistics, Tracer);
                if (Need_Search_Statistics)
                    Complete_Result_Set_Info = returnArgs.Statistics;
                Paged_Results = returnArgs.Paged_Results;
                return;
            }
        }
    }

    List<short> links = new List<short>();
    List<short> db_fields = new List<short>();
    List<string> db_terms = Terms.ToList();

    // Step through all the web fields and convert to db fields.
    // links[i] records how term i joins to the previous term: 0 = AND, 1 = OR, 2 = AND NOT.
    for (int i = 0; i < ActualCount; i++)
    {
        if (Web_Fields[i].Length > 1)
        {
            // Find the joiner ( '+' = AND, '=' = OR, '-' = AND NOT; no prefix defaults to AND )
            if ((Web_Fields[i][0] == '+') || (Web_Fields[i][0] == '=') || (Web_Fields[i][0] == '-'))
            {
                if (Web_Fields[i][0] == '+') links.Add(0);
                if (Web_Fields[i][0] == '=') links.Add(1);
                if (Web_Fields[i][0] == '-') links.Add(2);
                Web_Fields[i] = Web_Fields[i].Substring(1);
            }
            else
            {
                links.Add(0);
            }

            // Find the db field number
            db_fields.Add(Metadata_Field_Number(Web_Fields[i]));
        }

        // Also add starting and ending quotes to all the valid searches
        if (db_terms[i].Length > 0)
        {
            if ((db_terms[i].IndexOf("\"") < 0) && (db_terms[i].IndexOf(" ") < 0))
            {
                // Since this is a single word, see what type of special codes to include
                switch (Current_Mode.Search_Precision)
                {
                    case Search_Precision_Type_Enum.Contains:
                        db_terms[i] = "\"" + db_terms[i] + "\"";
                        break;

                    case Search_Precision_Type_Enum.Inflectional_Form:
                        // If there are any non-characters, don't use inflectional for this term
                        // (FORMSOF is a SQL Server full-text CONTAINS construct)
                        bool inflectional = db_terms[i].All(Char.IsLetter);
                        if (inflectional)
                        {
                            db_terms[i] = "FORMSOF(inflectional," + db_terms[i] + ")";
                        }
                        else
                        {
                            db_terms[i] = "\"" + db_terms[i] + "\"";
                        }
                        break;

                    case Search_Precision_Type_Enum.Synonmic_Form:
                        // If there are any non-characters, don't use thesaurus for this term
                        bool thesaurus = db_terms[i].All(Char.IsLetter);
                        if (thesaurus)
                        {
                            db_terms[i] = "FORMSOF(thesaurus," + db_terms[i] + ")";
                        }
                        else
                        {
                            db_terms[i] = "\"" + db_terms[i] + "\"";
                        }
                        break;
                }
            }
            else
            {
                // Multi-word or already-quoted term: just quote it (exact match skips quoting)
                if (Current_Mode.Search_Precision != Search_Precision_Type_Enum.Exact_Match)
                {
                    db_terms[i] = "\"" + db_terms[i] + "\"";
                }
            }
        }
    }

    // Get the page count in the results (one-based, minimum of 1)
    int current_page_index = Current_Mode.Page.HasValue ? Math.Max(Current_Mode.Page.Value, ((ushort)1)) : 1;

    // If this is an exact match, just do the search
    if (Current_Mode.Search_Precision == Search_Precision_Type_Enum.Exact_Match)
    {
        Multiple_Paged_Results_Args returnArgs = Engine_Database.Perform_Metadata_Exact_Search_Paged(db_terms[0], db_fields[0], INCLUDE_PRIVATE, Current_Mode.Aggregation, Date1, Date2, Results_Per_Page, current_page_index, Current_Sort, Need_Search_Statistics, facetsList, Need_Search_Statistics, Tracer);
        if (Need_Search_Statistics)
            Complete_Result_Set_Info = returnArgs.Statistics;
        Paged_Results = returnArgs.Paged_Results;
    }
    else
    {
        // Finish filling up the fields and links (the stored procedure takes
        // exactly ten slots; -1 / empty mark unused slots)
        while (links.Count < 10)
            links.Add(0);
        while (db_fields.Count < 10)
            db_fields.Add(-1);
        while (db_terms.Count < 10)
            db_terms.Add(String.Empty);

        // See if this is a simple search, which can use a more optimized search routine
        bool simplified_search = db_fields.All(Field => (Field <= 0));

        // Perform either the simpler metadata search, or the more complex
        if (simplified_search)
        {
            // Collapse all terms into a single joined search expression
            StringBuilder searchBuilder = new StringBuilder();
            for (int i = 0; i < db_terms.Count; i++)
            {
                if (db_terms[i].Length > 0)
                {
                    if (i > 0)
                    {
                        // NOTE(review): after the padding above links.Count is 10 and
                        // i < 10, so this branch is unreachable; also another copy of
                        // this routine indexes links[i - 1] here instead of links[i] —
                        // confirm which indexing is correct before consolidating.
                        if (i > links.Count)
                        {
                            searchBuilder.Append(" AND ");
                        }
                        else
                        {
                            switch (links[i])
                            {
                                case 0:
                                    searchBuilder.Append(" AND ");
                                    break;
                                case 1:
                                    searchBuilder.Append(" OR ");
                                    break;
                                case 2:
                                    searchBuilder.Append(" AND NOT ");
                                    break;
                            }
                        }
                    }
                    searchBuilder.Append(db_terms[i]);
                }
            }
            Multiple_Paged_Results_Args returnArgs = Engine_Database.Perform_Metadata_Search_Paged(searchBuilder.ToString(), INCLUDE_PRIVATE, Current_Mode.Aggregation, Date1, Date2, Results_Per_Page, current_page_index, Current_Sort, Need_Search_Statistics, facetsList, Need_Search_Statistics, Tracer);
            if (Need_Search_Statistics)
                Complete_Result_Set_Info = returnArgs.Statistics;
            Paged_Results = returnArgs.Paged_Results;
        }
        else
        {
            // Perform search in the database, passing all ten term/field/link slots
            Multiple_Paged_Results_Args returnArgs = Engine_Database.Perform_Metadata_Search_Paged(links[0], db_terms[0], db_fields[0], links[1], db_terms[1], db_fields[1], links[2], db_terms[2], db_fields[2], links[3], db_terms[3], db_fields[3], links[4], db_terms[4], db_fields[4], links[5], db_terms[5], db_fields[5], links[6], db_terms[6], db_fields[6], links[7], db_terms[7], db_fields[7], links[8], db_terms[8], db_fields[8], links[9], db_terms[9], db_fields[9], INCLUDE_PRIVATE, Current_Mode.Aggregation, Date1, Date2, Results_Per_Page, current_page_index, Current_Sort, Need_Search_Statistics, facetsList, Need_Search_Statistics, Tracer);
            if (Need_Search_Statistics)
                Complete_Result_Set_Info = returnArgs.Statistics;
            Paged_Results = returnArgs.Paged_Results;
        }
    }
}
/// <summary> Stores the search result statistics object to the local cache </summary>
/// <param name="Current_Mode"> Mode / navigation information for the current request</param>
/// <param name="Count"> Number of fields or terms to include in the key for this result </param>
/// <param name="Fields"> List of all search fields for the search result statistics to store </param>
/// <param name="Terms"> List of all search terms for the search result statistics to store </param>
/// <param name="DateRange_Start"> Beginning of a date range search, or -1 </param>
/// <param name="DateRange_End"> End of a date range search, or -1 </param>
/// <param name="StoreObject"> Search result statistics object to store </param>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering</param>
public static void Store_Search_Result_Statistics(Results_Arguments Current_Mode, int Count, List<string> Fields, List<string> Terms, long DateRange_Start, long DateRange_End, Search_Results_Statistics StoreObject, Custom_Tracer Tracer)
{
    // Nothing to do when caching is disabled (skip even the tracing)
    if (Settings.Disabled)
        return;

    // An empty aggregation code is keyed as the 'all' collection
    string aggregation_code = Current_Mode.Aggregation.ToLower();
    if (aggregation_code.Length == 0)
        aggregation_code = "all";

    // Map the search precision onto the token used inside the cache key
    string precision;
    switch (Current_Mode.Search_Precision)
    {
        case Search_Precision_Type_Enum.Contains:
            precision = "contains";
            break;
        case Search_Precision_Type_Enum.Exact_Match:
            precision = "exact";
            break;
        case Search_Precision_Type_Enum.Synonmic_Form:
            precision = "like";
            break;
        default:
            precision = "results";
            break;
    }

    // Compose the key: prefix, then the (lowercased) terms, then the fields
    StringBuilder keyBuilder = new StringBuilder("TOTALRESULTS_" + precision + "_" + aggregation_code + "_T_");
    for (int index = 0; index < Count; index++)
    {
        keyBuilder.Append(Terms[index].ToLower() + "_");
    }
    keyBuilder.Append("F_");
    for (int index = 0; index < Count; index++)
    {
        keyBuilder.Append(Fields[index] + "_");
    }

    // Append any date range restriction to the key
    if (DateRange_Start >= 0)
    {
        keyBuilder.Append("_DATE" + DateRange_Start);
        if (DateRange_End >= 0)
        {
            keyBuilder.Append("-" + DateRange_End);
        }
    }

    string key = keyBuilder.ToString();

    // A coordinate-only search (no textual search string) uses a coordinate-based key instead
    if ((String.IsNullOrEmpty(Current_Mode.Search_String)) && (!String.IsNullOrEmpty(Current_Mode.Coordinates)))
    {
        key = "TOTALRESULTS_" + precision + "_" + aggregation_code + "coord_" + Current_Mode.Coordinates;
    }

    // Store on the local cache only when not already present, with a short sliding expiration
    if (HttpContext.Current.Cache[key] == null)
    {
        if (Tracer != null)
        {
            Tracer.Add_Trace("CachedDataManager.Store_Search_Result_Statistics", "Adding object '" + key + "' to the local cache with expiration of 3 minutes");
        }

        HttpContext.Current.Cache.Insert(key, StoreObject, null, Cache.NoAbsoluteExpiration, TimeSpan.FromMinutes(3));
    }
}
/// <summary> Retrieve the public user folder information and browse by user folder id </summary>
/// <param name="UserFolderID"> Primary key for the public user folder to retrieve </param>
/// <param name="ResultsPage">Which page of results to return ( one-based, so the first page is page number of one ) </param>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering </param>
/// <param name="Folder_Info"> [OUT] Information about this public user folder including name and owner</param>
/// <param name="Complete_Result_Set_Info"> [OUT] Information about the entire set of results </param>
/// <param name="Paged_Results"> [OUT] List of search results for the requested page of results </param>
/// <returns> TRUE if successful, otherwise FALSE (folder unknown or not public) </returns>
/// <remarks> This attempts to pull the objects from the cache.  If unsuccessful, it builds the objects from the
/// database and hands off to the <see cref="CachedDataManager" /> to store in the cache </remarks>
public bool Get_Public_User_Folder(int UserFolderID, int ResultsPage, Custom_Tracer Tracer, out Public_User_Folder Folder_Info, out Search_Results_Statistics Complete_Result_Set_Info, out List<iSearch_Title_Result> Paged_Results)
{
    if (Tracer != null)
    {
        Tracer.Add_Trace("SobekCM_Assistant.Get_Public_User_Folder", String.Empty);
    }

    // Set output initially to null
    Paged_Results = null;
    Complete_Result_Set_Info = null;

    // Try to get this from the cache first, otherwise get from database and store in cache
    Folder_Info = CachedDataManager.Retrieve_Public_Folder_Info(UserFolderID, Tracer);
    if (Folder_Info == null)
    {
        Folder_Info = SobekCM_Database.Get_Public_User_Folder(UserFolderID, Tracer);
        if ((Folder_Info != null) && (Folder_Info.IsPublic))
        {
            CachedDataManager.Store_Public_Folder_Info(Folder_Info, Tracer);
        }
    }

    // If this folder is invalid or private, return false
    if ((Folder_Info == null) || (!Folder_Info.IsPublic))
    {
        return false;
    }

    // Look to see if the browse statistics are available on any cache for this browse
    bool need_browse_statistics = true;
    Complete_Result_Set_Info = CachedDataManager.Retrieve_Public_Folder_Statistics(UserFolderID, Tracer);
    if (Complete_Result_Set_Info != null)
        need_browse_statistics = false;

    // Look to see if the paged results are available on any cache..
    bool need_paged_results = true;
    Paged_Results = CachedDataManager.Retrieve_Public_Folder_Browse(UserFolderID, ResultsPage, Tracer);
    if (Paged_Results != null)
        need_paged_results = false;

    // Was a copy found in the cache?
    if ((!need_browse_statistics) && (!need_paged_results))
    {
        // BUG FIX: these two traces were mislabeled "SobekCM_Assistant.Get_User_Folder"
        // (copy-paste from the private-folder sibling), making trace output misleading
        if (Tracer != null)
        {
            Tracer.Add_Trace("SobekCM_Assistant.Get_Public_User_Folder", "Browse statistics and paged results retrieved from cache");
        }
    }
    else
    {
        if (Tracer != null)
        {
            Tracer.Add_Trace("SobekCM_Assistant.Get_Public_User_Folder", "Building results information");
        }

        // Pull from the database (public folders always browse 20 results per page)
        Single_Paged_Results_Args returnArgs = Engine_Database.Get_Public_Folder_Browse(UserFolderID, 20, ResultsPage, false, new List<short>(), need_browse_statistics, Tracer);
        if (need_browse_statistics)
        {
            Complete_Result_Set_Info = returnArgs.Statistics;
        }
        Paged_Results = returnArgs.Paged_Results;

        // Save the overall result set statistics to the cache if something was pulled
        if ((need_browse_statistics) && (Complete_Result_Set_Info != null))
        {
            CachedDataManager.Store_Public_Folder_Statistics(UserFolderID, Complete_Result_Set_Info, Tracer);
        }

        // Save the page of results to the cache if something was pulled
        if ((need_paged_results) && (Paged_Results != null))
        {
            CachedDataManager.Store_Public_Folder_Browse(UserFolderID, ResultsPage, Paged_Results, Tracer);
        }
    }
    return true;
}
/// <summary> Constructor for a new instance of the No_Results_ResultsViewer class </summary>
/// <param name="RequestSpecificValues"> All the necessary, non-global data specific to the current request </param>
/// <param name="ResultsStats"> Statistics about the results to display including the facets </param>
/// <param name="PagedResults"> Actual pages of results </param>
public No_Results_ResultsViewer(RequestCache RequestSpecificValues, Search_Results_Statistics ResultsStats, List<iSearch_Title_Result> PagedResults) : base(RequestSpecificValues, ResultsStats, PagedResults)
{
    // All state is held by the base results viewer; this subclass needs no
    // additional construction work of its own.
}
/// <summary> Performs a search ( or retrieves the search results from the cache ) and outputs the results and search url used </summary>
/// <param name="Current_Mode"> Mode / navigation information for the current request</param>
/// <param name="All_Items_Lookup"> Lookup object used to pull basic information about any item loaded into this library </param>
/// <param name="Aggregation_Object"> Object for the current aggregation object, against which this search is performed </param>
/// <param name="Search_Stop_Words"> List of search stop words </param>
/// <param name="Tracer"> Trace object keeps a list of each method executed and important milestones in rendering </param>
/// <param name="Complete_Result_Set_Info"> [OUT] Information about the entire set of results </param>
/// <param name="Paged_Results"> [OUT] List of search results for the requested page of results </param>
public void Get_Search_Results(Navigation_Object Current_Mode, Item_Lookup_Object All_Items_Lookup, Item_Aggregation Aggregation_Object, List<string> Search_Stop_Words, Custom_Tracer Tracer, out Search_Results_Statistics Complete_Result_Set_Info, out List<iSearch_Title_Result> Paged_Results )
{
    if (Tracer != null)
    {
        Tracer.Add_Trace("SobekCM_Assistant.Get_Search_Results", String.Empty);
    }

    // Set output initially to null; they are only populated on a successful
    // cache hit or a successful search below.
    Paged_Results = null;
    Complete_Result_Set_Info = null;

    // Get the sort, then restrict it to the whitelist of supported sort codes
    // ( 0, 1, 2, 10, 11 ); anything else falls back to 0.
    // NOTE(review): Math.Max with (ushort)1 forces any requested sort below one
    // up to one when a sort is present — confirm that is intentional.
    int sort = Current_Mode.Sort.HasValue ? Math.Max(Current_Mode.Sort.Value, ((ushort)1)) : 0;
    if ((sort != 0) && (sort != 1) && (sort != 2) && (sort != 10) && (sort != 11))
        sort = 0;

    // Depending on type of search, either go to database or Greenstone
    if (Current_Mode.Search_Type == Search_Type_Enum.Map)
    {
        // If this is showing in the map, only allow sort zero, which is by coordinates
        if ((Current_Mode.Result_Display_Type == Result_Display_Type_Enum.Map) || (Current_Mode.Result_Display_Type == Result_Display_Type_Enum.Default))
        {
            Current_Mode.Sort = 0;
            sort = 0;
        }

        try
        {
            // 1000 is used as the "not provided / invalid" sentinel for each coordinate
            // ( valid latitudes / longitudes never reach 1000 ).
            double lat1 = 1000;
            double long1 = 1000;
            double lat2 = 1000;
            double long2 = 1000;

            // Coordinates arrive as a comma-delimited string: either one point
            // ( lat,long ) or a bounding rectangle ( lat1,long1,lat2,long2 ).
            string[] terms = Current_Mode.Coordinates.Split(",".ToCharArray());
            if (terms.Length < 2)
            {
                // Not even a single point — send the user back to the search page
                Current_Mode.Mode = Display_Mode_Enum.Search;
                UrlWriterHelper.Redirect(Current_Mode);
                return;
            }
            if (terms.Length < 4)
            {
                // Single point: collapse the rectangle to that point
                lat1 = Convert.ToDouble(terms[0]);
                lat2 = lat1;
                long1 = Convert.ToDouble(terms[1]);
                long2 = long1;
            }
            if (terms.Length >= 4)
            {
                // Rectangle: empty segments leave the sentinel value in place
                if (terms[0].Length > 0) lat1 = Convert.ToDouble(terms[0]);
                if (terms[1].Length > 0) long1 = Convert.ToDouble(terms[1]);
                if (terms[2].Length > 0) lat2 = Convert.ToDouble(terms[2]);
                if (terms[3].Length > 0) long2 = Convert.ToDouble(terms[3]);
            }

            // If neither point is valid, return
            if (((lat1 == 1000) || (long1 == 1000)) && ((lat2 == 1000) || (long2 == 1000)))
            {
                Current_Mode.Mode = Display_Mode_Enum.Search;
                UrlWriterHelper.Redirect(Current_Mode);
                return;
            }

            // If just the first point is valid, use that
            if ((lat2 == 1000) || (long2 == 1000))
            {
                lat2 = lat1;
                long2 = long1;
            }

            // If just the second point is valid, use that
            if ((lat1 == 1000) || (long1 == 1000))
            {
                lat1 = lat2;
                long1 = long2;
            }

            // Perform the search against the database
            try
            {
                // Get the page count in the results
                int current_page_index = Current_Mode.Page.HasValue ? Math.Max(Current_Mode.Page.Value, ((ushort)1)) : 1;

                // Try to pull more than one page, so we can cache the next page or so
                Multiple_Paged_Results_Args returnArgs = Engine_Database.Get_Items_By_Coordinates(Current_Mode.Aggregation, lat1, long1, lat2, long2, false, 20, current_page_index, sort, false, new List<short>(), true, Tracer);
                List<List<iSearch_Title_Result>> pagesOfResults = returnArgs.Paged_Results;
                Complete_Result_Set_Info = returnArgs.Statistics;

                // Only the first returned page is handed back to the caller
                if ((pagesOfResults != null) && (pagesOfResults.Count > 0))
                    Paged_Results = pagesOfResults[0];
            }
            catch (Exception ee)
            {
                // Next, show the message to the user
                Current_Mode.Mode = Display_Mode_Enum.Error;
                string error_message = ee.Message;

                // NOTE(review): culture-sensitive ToUpper() used for the timeout
                // sniff — consider IndexOf with StringComparison.OrdinalIgnoreCase.
                if (error_message.ToUpper().IndexOf("TIMEOUT") >= 0)
                {
                    error_message = "Database Timeout Occurred<br /><br />Try again in a few minutes.<br /><br />";
                }
                Current_Mode.Error_Message = error_message;
                Current_Mode.Caught_Exception = ee;
            }
        }
        catch
        {
            // Any parsing failure on the coordinates redirects back to the search page
            Current_Mode.Mode = Display_Mode_Enum.Search;
            UrlWriterHelper.Redirect(Current_Mode);
        }
    }
    else
    {
        List<string> terms = new List<string>();
        List<string> web_fields = new List<string>();

        // Split the terms correctly ( only use the database stop words for the split if this will go to the database ultimately)
        if ((Current_Mode.Search_Type == Search_Type_Enum.Full_Text) || (Current_Mode.Search_Fields.IndexOf("TX") >= 0))
        {
            Split_Clean_Search_Terms_Fields(Current_Mode.Search_String, Current_Mode.Search_Fields, Current_Mode.Search_Type, terms, web_fields, null, Current_Mode.Search_Precision, ',');
        }
        else
        {
            Split_Clean_Search_Terms_Fields(Current_Mode.Search_String, Current_Mode.Search_Fields, Current_Mode.Search_Type, terms, web_fields, Search_Stop_Words, Current_Mode.Search_Precision, ',');
        }

        // Get the count that will be used ( terms and fields are paired )
        int actualCount = Math.Min(terms.Count, web_fields.Count);

        // Determine if this is a special search type which returns more rows and is not cached.
        // This is used to return the results as XML and DATASET
        bool special_search_type = false;
        int results_per_page = 20;
        if ((Current_Mode.Writer_Type == Writer_Type_Enum.XML) || (Current_Mode.Writer_Type == Writer_Type_Enum.DataSet))
        {
            results_per_page = 1000000;
            special_search_type = true;
            sort = 2; // Sort by BibID always for these
        }

        // Determine if a date range was provided
        // -1 means "no date restriction"; date1/date2 are normalized so date1 <= date2.
        long date1 = -1;
        long date2 = -1;
        if (Current_Mode.DateRange_Date1.HasValue)
        {
            date1 = Current_Mode.DateRange_Date1.Value;
            if (Current_Mode.DateRange_Date2.HasValue)
            {
                if (Current_Mode.DateRange_Date2.Value >= Current_Mode.DateRange_Date1.Value)
                    date2 = Current_Mode.DateRange_Date2.Value;
                else
                {
                    // Dates arrived reversed — swap them
                    date1 = Current_Mode.DateRange_Date2.Value;
                    date2 = Current_Mode.DateRange_Date1.Value;
                }
            }
            else
            {
                date2 = date1;
            }
        }
        if (date1 < 0)
        {
            // No explicit dates; fall back to the year range if one was provided.
            // Years are converted to day counts measured from 1 Jan 0001,
            // spanning 1 Jan of year1 through 31 Dec of year2 ( or year1 ).
            if ((Current_Mode.DateRange_Year1.HasValue ) && ( Current_Mode.DateRange_Year1.Value > 0 ))
            {
                DateTime startDate = new DateTime(Current_Mode.DateRange_Year1.Value, 1, 1);
                TimeSpan timeElapsed = startDate.Subtract(new DateTime(1, 1, 1));
                date1 = (long)timeElapsed.TotalDays;
                if ((Current_Mode.DateRange_Year2.HasValue) && (Current_Mode.DateRange_Year2.Value > 0))
                {
                    startDate = new DateTime(Current_Mode.DateRange_Year2.Value, 12, 31);
                    timeElapsed = startDate.Subtract(new DateTime(1, 1, 1));
                    date2 = (long)timeElapsed.TotalDays;
                }
                else
                {
                    startDate = new DateTime(Current_Mode.DateRange_Year1.Value, 12, 31);
                    timeElapsed = startDate.Subtract(new DateTime(1, 1, 1));
                    date2 = (long) timeElapsed.TotalDays;
                }
            }
        }

        // Set the flags for how much data is needed. (i.e., do we need to pull ANYTHING? or
        // perhaps just the next page of results ( as opposed to pulling facets again).
        bool need_search_statistics = true;
        bool need_paged_results = true;
        if (!special_search_type)
        {
            // Look to see if the search statistics are available on any cache..
            Complete_Result_Set_Info = CachedDataManager.Retrieve_Search_Result_Statistics(Current_Mode, actualCount, web_fields, terms, date1, date2, Tracer);
            if (Complete_Result_Set_Info != null)
                need_search_statistics = false;

            // Look to see if the paged results are available on any cache..
            Paged_Results = CachedDataManager.Retrieve_Search_Results(Current_Mode, sort, actualCount, web_fields, terms, date1, date2, Tracer);
            if (Paged_Results != null)
                need_paged_results = false;
        }

        // If both were retrieved, do nothing else
        if ((need_paged_results) || (need_search_statistics))
        {
            // Should this pull the search from the database, or from greenstone?
            if ((Current_Mode.Search_Type == Search_Type_Enum.Full_Text) || (Current_Mode.Search_Fields.IndexOf("TX") >= 0))
            {
                try
                {
                    // Get the page count in the results
                    int current_page_index = Current_Mode.Page.HasValue ? Math.Max(Current_Mode.Page.Value, ((ushort)1)) : 1;

                    // Perform the search against greenstone
                    Search_Results_Statistics recomputed_search_statistics;
                    Perform_Solr_Search(Tracer, terms, web_fields, actualCount, Current_Mode.Aggregation, current_page_index, sort, results_per_page, out recomputed_search_statistics, out Paged_Results);

                    // Keep cached statistics if they were already retrieved above
                    if (need_search_statistics)
                        Complete_Result_Set_Info = recomputed_search_statistics;
                }
                catch (Exception ee)
                {
                    Current_Mode.Mode = Display_Mode_Enum.Error;
                    Current_Mode.Error_Message = "Unable to perform search at this time";
                    Current_Mode.Caught_Exception = ee;
                }

                // If this was a special search, don't cache this
                if (!special_search_type)
                {
                    // Cache the search statistics, if it was needed
                    if ((need_search_statistics) && (Complete_Result_Set_Info != null))
                    {
                        CachedDataManager.Store_Search_Result_Statistics(Current_Mode, actualCount, web_fields, terms, date1, date2, Complete_Result_Set_Info, Tracer);
                    }

                    // Cache the search results
                    if ((need_paged_results) && (Paged_Results != null))
                    {
                        CachedDataManager.Store_Search_Results(Current_Mode, sort, actualCount, web_fields, terms, date1, date2, Paged_Results, Tracer);
                    }
                }
            }
            else
            {
                // Try to pull more than one page, so we can cache the next page or so
                List<List<iSearch_Title_Result>> pagesOfResults = new List<List<iSearch_Title_Result>>();

                // Perform the search against the database
                try
                {
                    Search_Results_Statistics recomputed_search_statistics;
                    Perform_Database_Search(Tracer, terms, web_fields, date1, date2, actualCount, Current_Mode, sort, Aggregation_Object, All_Items_Lookup, results_per_page, !special_search_type, out recomputed_search_statistics, out pagesOfResults, need_search_statistics);

                    // Keep cached statistics if they were already retrieved above
                    if (need_search_statistics)
                        Complete_Result_Set_Info = recomputed_search_statistics;

                    // Only the first returned page is handed back to the caller
                    if ((pagesOfResults != null) && (pagesOfResults.Count > 0))
                        Paged_Results = pagesOfResults[0];
                }
                catch (Exception ee)
                {
                    // Next, show the message to the user
                    Current_Mode.Mode = Display_Mode_Enum.Error;
                    string error_message = ee.Message;

                    // NOTE(review): culture-sensitive ToUpper() used for the timeout
                    // sniff — consider IndexOf with StringComparison.OrdinalIgnoreCase.
                    if (error_message.ToUpper().IndexOf("TIMEOUT") >= 0)
                    {
                        error_message = "Database Timeout Occurred<br /><br />Try narrowing your search by adding more terms <br />or putting quotes around your search.<br /><br />";
                    }
                    Current_Mode.Error_Message = error_message;
                    Current_Mode.Caught_Exception = ee;
                }

                // If this was a special search, don't cache this
                if (!special_search_type)
                {
                    // Cache the search statistics, if it was needed
                    if ((need_search_statistics) && (Complete_Result_Set_Info != null))
                    {
                        CachedDataManager.Store_Search_Result_Statistics(Current_Mode, actualCount, web_fields, terms, date1, date2, Complete_Result_Set_Info, Tracer);
                    }

                    // Cache the search results ( all pulled pages, not just the first )
                    if ((need_paged_results) && (pagesOfResults != null))
                    {
                        CachedDataManager.Store_Search_Results(Current_Mode, sort, actualCount, web_fields, terms, date1, date2, pagesOfResults, Tracer);
                    }
                }
            }
        }
    }

    ////create search results json object and place into session state
    //DataTable TEMPsearchResults = new DataTable();
    //TEMPsearchResults.Columns.Add("BibID", typeof(string));
    //TEMPsearchResults.Columns.Add("Spatial_Coordinates", typeof(string));
    //foreach (iSearch_Title_Result searchTitleResult in Paged_Results)
    //{
    //    TEMPsearchResults.Rows.Add(searchTitleResult.BibID, searchTitleResult.Spatial_Coordinates);
    //}
    //HttpContext.Current.Session["TEMPSearchResultsJSON"] = Google_Map_ResultsViewer_Beta.Create_JSON_Search_Results_Object(TEMPsearchResults);
}
/// <summary> Constructor for a new instance of the Single_Paged_Results_Args class </summary>
/// <param name="Statistics"> Statistics/information about the overall search or browse, including initial query time, complete results counts, and facets </param>
/// <param name="Paged_Results"> Single page of results, which is collection of search title results </param>
public Single_Paged_Results_Args(Search_Results_Statistics Statistics, List<iSearch_Title_Result> Paged_Results)
{
    // Simple carrier object: store both pieces of the result set as provided
    this.Paged_Results = Paged_Results;
    this.Statistics = Statistics;
}
/// <summary> Constructor for a new instance of the Export_File_ResultsViewer class </summary>
/// <param name="RequestSpecificValues"> All the necessary, non-global data specific to the current request </param>
/// <param name="ResultsStats"> Statistics about the results to display including the facets </param>
/// <param name="PagedResults"> Actual pages of results </param>
public Export_File_ResultsViewer(RequestCache RequestSpecificValues, Search_Results_Statistics ResultsStats, List<iSearch_Title_Result> PagedResults)
    : base(RequestSpecificValues, ResultsStats, PagedResults)
{
    // Exports handle far more rows per request than the on-screen viewers,
    // so raise the per-page limit accordingly
    Results_Per_Page = 1000;
}