/// <summary>
/// Encrypts the supplied password text via the SQL CLR encryption UDF.
/// </summary>
/// <param name="pPassword">Plain-text password to encrypt.</param>
/// <returns>The encrypted value converted back to a plain string.</returns>
public static string SISEncriptarTexto(string pPassword)
{
    SqlString plainText = pPassword;
    SqlString cipherText = UserDefinedFunctions.EncryptFunction(plainText);

    return Extensors.CheckStr(cipherText);
}
/// <summary>
/// Initializes a new registry with a fresh GUID identifier, the default
/// registry type, and empty item/organization collections.
/// </summary>
public Registry()
{
    Type = RegistryTypes.Registry;
    Id = UserDefinedFunctions.NewGuid();
    Organizations = new HashSet<OrganizationRegistry>();
    Items = new HashSet<RegistryItemDefinition>();
}
/// <summary>
/// Initializes a new group with a fresh SID and empty related collections.
/// </summary>
public Group()
{
    SID = UserDefinedFunctions.NewGuid();
    Organizations = new HashSet<Organization>();
    DataMarts = new HashSet<DataMart>();
    Projects = new HashSet<Project>();
}
/// <summary>
/// Gets the on-disk discovery path for an absolute URI, creating the target
/// directory if necessary and sanitizing Windows reserved device names.
/// </summary>
/// <param name = "downloadedDiscoveryDirectory">The downloaded discovery directory.</param>
/// <param name = "absoluteUri">The absolute URI.</param>
/// <param name = "fullTextIndexType">File-type suffix appended to the hashed file name.</param>
/// <returns>The full path where the discovery should be stored.</returns>
public override string GetDiscoveryPath(string downloadedDiscoveryDirectory, string absoluteUri, string fullTextIndexType)
{
    string directory = UserDefinedFunctions.ExtractDirectory(downloadedDiscoveryDirectory, absoluteUri).Value;

    DirectoryInfo directoryInfo = null;

    if (!Directory.Exists(directory))
    {
        //ANODET: View the history of this... :)
        //http://msdn.microsoft.com/en-us/library/aa365247(v=vs.85).aspx#file_and_directory_names
        //CON, PRN, AUX, NUL, COM1, COM2, COM3, COM4, COM5, COM6, COM7, COM8, COM9, LPT1, LPT2, LPT3, LPT4, LPT5, LPT6, LPT7, LPT8, and LPT9
        //are reserved device names on Windows; rename any colliding path segment so CreateDirectory cannot fail.
        //A trailing separator is appended temporarily so the patterns also match the final segment.
        directory += "\\";

        // IMPROVED: lowercase once up front instead of the original's repeated
        // (idempotent) ToLower() call before every replacement step.
        directory = directory.ToLower();
        directory = Regex.Replace(directory, "\\\\com\\d\\\\", "\\com_\\");
        directory = Regex.Replace(directory, "\\\\lpt\\d\\\\", "\\lpt_\\");
        directory = directory.Replace("\\aux\\", "\\aux_\\").Replace("\\con\\", "\\con_\\").Replace("\\nul\\", "\\nul_\\").Replace("\\prn\\", "\\prn_\\");

        // Drop the temporary trailing separator again.
        directory = directory.Substring(0, directory.Length - 1);

        directoryInfo = Directory.CreateDirectory(directory);
    }
    else
    {
        directoryInfo = new DirectoryInfo(directory);
    }

    return Path.Combine(directoryInfo.FullName, "_" + new Hash(absoluteUri) + fullTextIndexType);
}
/// <summary>
/// Normalizes a product name, sorts its words (removing numRemoveLetters via
/// SortString), and returns only the non-digit word content, space separated.
/// </summary>
/// <param name="str">Raw product name.</param>
/// <param name="numRemoveLetters">Letter-removal count forwarded to SortString.</param>
/// <returns>The cleaned, sorted, digit-free string; empty when nothing remains.</returns>
public static SqlString GetString(string str, int numRemoveLetters)
{
    string stringValue = UserDefinedFunctions.CleanUpProductName(str);
    //string stringValue = str.Replace("  ", " ").Trim();

    // dbo.Split
    string stringSortValue = UserDefinedFunctions.CleanUpProductName(UserDefinedFunctions.SortString(stringValue, " ", numRemoveLetters));

    if (stringSortValue == null)
    {
        stringSortValue = "0";
    }

    StringBuilder res = new StringBuilder();

    // Words: collect runs of non-digit characters and re-join with single spaces.
    // FIXED: the original appended a pointless, discarded ToString() call and
    // dead-checked the freshly constructed StringBuilder for null.
    foreach (Match match in Regex.Matches(stringSortValue, @"[\D']*"))
    {
        res.Append(match.Value.Trim()).Append(' ');
    }

    // Result: trimming an all-whitespace buffer already yields "".
    return res.ToString().Trim();
}
/// <summary>
/// Removes duplicate words (case-insensitively) from a cleaned product name,
/// keeping the first occurrence of each word with its original casing.
/// </summary>
/// <param name="str">Raw product name; cleaned via CleanUpProductName first.</param>
/// <returns>The input with later duplicate words removed.</returns>
public static string RemoveDuplicateString(string str)
{
    string stringValue = UserDefinedFunctions.CleanUpProductName(str);

    // IMPROVED: a HashSet replaces the original Dictionary<string, bool>
    // ContainsKey/Add pair -- Add() returns false for duplicates in one lookup.
    var seen = new HashSet<string>();

    // Build up the result here.
    StringBuilder b = new StringBuilder();

    // Split the input, dropping empty entries from repeated spaces.
    string[] words = stringValue.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);

    foreach (string current in words)
    {
        // Track by lower-cased form so duplicates match case-insensitively,
        // but emit the original casing of the first occurrence.
        if (seen.Add(current.ToLower()))
        {
            b.Append(current).Append(' ');
        }
    }

    return b.ToString().Trim();
}
/// <summary>
/// Accumulates pairwise Levenshtein distances between the inner texts of all
/// XPathInfo entries.  NOTE(review): both iteration orders visit each
/// unordered pair, so every node accumulates 2x its distance to each other
/// node -- confirm this double-counting is intentional before changing.
/// </summary>
/// <param name="xpathInfos">XPath entries keyed by their XPath expression.</param>
private static void ProcessXPaths(IDictionary <string, XPathInfo> xpathInfos)
{
    foreach (XPathInfo xPathInfo1 in xpathInfos.Values)
    {
        foreach (XPathInfo xPathInfo2 in xpathInfos.Values)
        {
            try
            {
                if (xPathInfo1.InnerText != xPathInfo2.InnerText)
                {
                    // Compare only the shorter text's length, capped at 1000
                    // characters to bound the O(m*n) Levenshtein cost.
                    int maximumNumberOfCharatersToEvaluate = xPathInfo1.InnerText.Length > xPathInfo2.InnerText.Length ? xPathInfo2.InnerText.Length : xPathInfo1.InnerText.Length;

                    maximumNumberOfCharatersToEvaluate = maximumNumberOfCharatersToEvaluate > 1000 ? 1000 : maximumNumberOfCharatersToEvaluate;

                    int levenstheinDistance = UserDefinedFunctions.ComputeLevenstheinDistance(xPathInfo1.InnerText.Substring(0, maximumNumberOfCharatersToEvaluate), xPathInfo2.InnerText.Substring(0, maximumNumberOfCharatersToEvaluate)).Value;

                    // Credit the distance to both members of the pair.
                    xPathInfo1.LevenstheinDistance += levenstheinDistance;
                    xPathInfo2.LevenstheinDistance += levenstheinDistance;
                }
            }
            catch
            {
                // Best-effort: a failing pair (e.g. null InnerText) is
                // deliberately skipped so one bad entry cannot abort the scan.
            }
        }
    }
}
/// <summary>
/// Scans every routine definition in INFORMATION_SCHEMA.ROUTINES for the
/// string "Cat_Parametro" and writes the matching definitions to a results file.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void Main(string[] args)
{
    ArrendamientoInmuebleEntities ctx = new ArrendamientoInmuebleEntities();

    using (System.IO.StreamWriter file = new System.IO.StreamWriter(@"C:\Users\desa21\source\repos\BucaReegexEnStoreProcedures\BucaReegexEnStoreProcedures\Resultados\SPs.txt"))
    {
        var results = ctx.Database.SqlQuery <INFORMATION_SCHEMA_Result>("SELECT ROUTINE_NAME, ROUTINE_DEFINITION FROM INFORMATION_SCHEMA.ROUTINES");

        foreach (var reg in results)
        {
            if (U.IsExcelNull(reg.ROUTINE_DEFINITION))
            {
                file.WriteLine(reg.ROUTINE_NAME + " ......< definición vacia");
            }
            else
            {
                // FIXED: the original guarded the argument with
                // reg.ROUTINE_DEFINITION.Equals(null) ? " " : ..., which can
                // never be true on this branch (IsExcelNull handled the empty
                // case above) and would itself throw if the value were null.
                if (UserDefinedFunctions.fncRegexFind("Cat_Parametro", reg.ROUTINE_DEFINITION))
                {
                    file.WriteLine("NAME >> " + reg.ROUTINE_NAME + " <<");
                    file.WriteLine("---------------------------------");
                    file.WriteLine(reg.ROUTINE_DEFINITION);
                    file.WriteLine("////////////////////////////==================================");
                }
            }
        }
    }

    Console.WriteLine("Fin");
    Console.ReadKey();
}
/// <summary>
/// Determines the DataType for a crawl request from its HTTP response
/// Content-Type header, falling back to the "UNKNOWN" content type when the
/// header is missing or unrecognized.
/// </summary>
/// <param name = "crawlRequest">The crawl request.</param>
/// <returns>The resolved DataType; never null.</returns>
public override DataType DetermineDataType(CrawlRequest <TArachnodeDAO> crawlRequest)
{
    DataType dataType;

    string extension = UserDefinedFunctions.ExtractFileExtension(crawlRequest.Discovery.Uri.AbsoluteUri.ToLower()).Value;

    if (crawlRequest.WebClient.HttpWebResponse != null && !string.IsNullOrEmpty(crawlRequest.WebClient.HttpWebResponse.ContentType))
    {
        // "Text/HTML; charset=utf-8" -> "text/html" (parameters and quotes stripped).
        string contentType = crawlRequest.WebClient.HttpWebResponse.ContentType.Split(';')[0].ToLowerInvariant().Replace("\"", "");

        if (AllowedDataTypes.ContainsKey(contentType))
        {
            dataType = DetermineDataType(contentType, extension);
        }
        else
        {
            // IMPROVED: TryGetValue replaces the original ContainsKey +
            // indexer pair, avoiding a redundant dictionary lookup.
            if (!_contentTypesByName.TryGetValue(contentType, out var contentTypeName))
            {
                contentTypeName = _contentTypesByName["UNKNOWN"];
            }

            dataType = new DataType(contentType, contentTypeName, DiscoveryType.None, extension, null, null);
        }
    }
    else
    {
        // No response, or the response carried no Content-Type header at all.
        dataType = new DataType(null, _contentTypesByName["UNKNOWN"], DiscoveryType.None, null, null, null);
    }

    return dataType;
}
/// <summary>
/// Initializes a new project with a fresh SID and empty related collections.
/// </summary>
public Project()
{
    SID = UserDefinedFunctions.NewGuid();
    Activities = new HashSet<Activity>();
    SecurityGroups = new HashSet<ProjectSecurityGroup>();
    DataMarts = new HashSet<DataMart>();
}
/// <summary>
/// Computes a symmetric match-numbers distance between two strings by
/// averaging the directional UDF distances in both orders.
/// </summary>
/// <param name="strInput1">First string; null is treated as empty.</param>
/// <param name="strInput2">Second string; null is treated as empty.</param>
/// <param name="ignoreCase">Whether the comparison should ignore case.</param>
/// <returns>0 when either directional distance is 0; otherwise the average.</returns>
public static double FinalMatchNumbersDistance(String strInput1, String strInput2, Boolean ignoreCase)
{
    // Normalize nulls so the UDF calls below never receive null input.
    if (strInput1 == null)
    {
        strInput1 = "";
    }

    if (strInput2 == null)
    {
        strInput2 = "";
    }

    // FIXED: the original wrapped the body in
    // if ((strInput1 != null) || (strInput2 != null)) { ... } return defaultMatch;
    // which was always true after the normalization above, leaving the
    // defaultMatch fallback unreachable dead code.
    int ic = Convert.ToInt32(ignoreCase);

    double valueStrInput1 = UserDefinedFunctions.MatchNumbersDistance(strInput1, strInput2, ic);
    double valueStrInput2 = UserDefinedFunctions.MatchNumbersDistance(strInput2, strInput1, ic);

    double finalValue1 = (valueStrInput1 + valueStrInput2) / 2;

    // if numerator is zero, return zero else return "Final Match Numbers Distance"
    if (valueStrInput1 == 0.0 || valueStrInput2 == 0.0)
    {
        return 0;
    }

    return finalValue1;
}
/// <summary>
/// Verifies that GetDebtsByPeriod returns an enumerable row collection with
/// more than one row for a known account and date range.
/// </summary>
public void GetDebtsByPeriod()
{
    var periodStart = new DateTime(2004, 12, 1);
    var periodEnd = new DateTime(2016, 10, 1);

    var rows = UserDefinedFunctions.GetDebtsByPeriod("11816105", periodStart, periodEnd);

    Assert.IsInstanceOfType(rows, typeof(IEnumerable));
    Assert.IsTrue((rows as DataRowCollection).Count > 1);
}
/// <summary>
/// Verifies that GetDebtsByServices returns a non-empty enumerable row
/// collection for a known account and date range.
/// </summary>
public void GetDebtsByServices()
{
    var periodStart = new DateTime(2012, 10, 1);
    var periodEnd = new DateTime(2017, 1, 31);

    var rows = UserDefinedFunctions.GetDebtsByServices("11789150", periodStart, periodEnd);

    Assert.IsInstanceOfType(rows, typeof(IEnumerable));
    Assert.IsTrue((rows as DataRowCollection).Count > 0);
}
/// <summary>
/// Extracts only the digits and spaces from a cleaned product name.
/// </summary>
/// <param name="str">Raw product name; cleaned via CleanUpProductName first.</param>
/// <returns>The digits-and-spaces content, re-cleaned via CleanUpProductName.</returns>
public static SqlString GetNumbers(string str)
{
    string stringValue = UserDefinedFunctions.CleanUpProductName(str);

    if (stringValue == null)
    {
        stringValue = "";
    }

    // IMPROVED: single-pass range test with a StringBuilder replaces the
    // original O(n*m) nested loop over a keep-array plus repeated string
    // concatenation.  The kept set is identical: ASCII '0'..'9' and ' '.
    StringBuilder kept = new StringBuilder(stringValue.Length);

    foreach (char thischar in stringValue)
    {
        if ((thischar >= '0' && thischar <= '9') || thischar == ' ')
        {
            kept.Append(thischar);
        }
    }

    return (SqlString)UserDefinedFunctions.CleanUpProductName(kept.ToString());
}
/// <summary>
/// Verifies that GetLawsuits(2) returns a non-empty enumerable row collection.
/// </summary>
public void GetLawsuits()
{
    var rows = UserDefinedFunctions.GetLawsuits(2);

    Assert.IsInstanceOfType(rows, typeof(IEnumerable));
    Assert.IsTrue((rows as DataRowCollection).Count > 0);
}
/// <summary>
/// Verifies that GetLivings returns exactly six rows for the known
/// account/range arguments.
/// </summary>
public void GetLivings()
{
    var rows = UserDefinedFunctions.GetLivings("7777777", 150, 250);

    Assert.IsInstanceOfType(rows, typeof(IEnumerable));
    Assert.AreEqual(6, (rows as DataRowCollection).Count);
}
/// <summary>
/// Verifies that GetDebtsTotal returns exactly one row for a known account.
/// </summary>
public void GetDebtsTotal()
{
    int accountNumber = 11816105; // personal account number (лицевой счет)

    var rows = UserDefinedFunctions.GetDebtsTotal(accountNumber);

    Assert.IsInstanceOfType(rows, typeof(IEnumerable));
    Assert.IsTrue((rows as DataRowCollection).Count == 1);
}
/// <summary>
/// Initializes a new user: fresh SID, empty trace/subscription collections,
/// a password expiring six months from now, and a password-restoration token
/// that is expired immediately.
/// </summary>
public User()
{
    SID = UserDefinedFunctions.NewGuid();
    UserPasswordTraces = new HashSet<UserPasswordTrace>();
    Subscriptions = new HashSet<Subscription>();
    PasswordExpiration = DateTime.Now.AddMonths(6);
    PasswordRestorationTokenExpiration = DateTime.Now;
}
/// <summary>
/// Verifies that addGsTTax applies a 1.0% tax to the original amount.
/// </summary>
public void TestMethodUserDefinedFunctions()
{
    SqlDouble originalAmount = 100.0;
    SqlDouble expectedAmount = 100.0 * 1.010;

    SqlDouble actualAmount = UserDefinedFunctions.addGsTTax(originalAmount);

    Assert.AreEqual(expectedAmount, actualAmount);
}
/// <summary>
/// Determines whether the specified crawl request is disallowed by the target
/// host's robots.txt.  When the cached disallowed paths are absent or older
/// than one day, this method synchronously crawls and parses robots.txt
/// before evaluating the request.
/// </summary>
/// <param name = "crawlRequest">The crawl request.</param>
/// <param name = "arachnodeDAO">The arachnode DAO.</param>
/// <returns>
/// <c>true</c> if the specified crawl request is disallowed; otherwise, <c>false</c>.
/// </returns>
public override bool IsDisallowed(CrawlRequest <TArachnodeDAO> crawlRequest, IArachnodeDAO arachnodeDAO)
{
    //ANODET: When you add the multi-server caching, the robots.txt file will need to be sent to all other CachePeers.
    //if we're not being called by the Engine prior to assigning to a Crawl...
    if (crawlRequest.Crawl != null)
    {
        // robots.txt always lives at the root of the request's scheme://host.
        string robotsDotTextAbsoluteUri = crawlRequest.Discovery.Uri.Scheme + Uri.SchemeDelimiter + crawlRequest.Discovery.Uri.Host + "/robots.txt";

        crawlRequest.OutputIsDisallowedReason = OutputIsDisallowedReason;

        // Skip hosts whose robots.txt URI is itself disallowed by configuration.
        if (!UserDefinedFunctions.IsDisallowedForAbsoluteUri(robotsDotTextAbsoluteUri, false, false))
        {
            // Refresh the cached disallowed paths when missing or older than one day.
            if (crawlRequest.Politeness.DisallowedPaths == null || (crawlRequest.Politeness.DisallowedPaths != null && DateTime.Now.Subtract(crawlRequest.Politeness.DisallowedPathsSince) > TimeSpan.FromDays(1)))
            {
                // Build a child CrawlRequest targeting the robots.txt document itself.
                CrawlRequest <TArachnodeDAO> robotsDotTextRequest = new CrawlRequest <TArachnodeDAO>(crawlRequest, crawlRequest.Crawl.Crawler.Cache.GetDiscovery(robotsDotTextAbsoluteUri, arachnodeDAO), 1, 1, (short)UriClassificationType.Host, (short)UriClassificationType.Host, double.MaxValue, RenderType.None, RenderType.None);

                robotsDotTextRequest.Discovery.DiscoveryState = DiscoveryState.Undiscovered;
                robotsDotTextRequest.Politeness = crawlRequest.Politeness;

                // A dedicated Crawl (sharing the parent crawler's managers) processes
                // the robots.txt request synchronously.
                Crawl <TArachnodeDAO> crawl = new Crawl <TArachnodeDAO>(crawlRequest.Crawl.Crawler.ApplicationSettings, crawlRequest.Crawl.Crawler.WebSettings, crawlRequest.Crawl.Crawler, crawlRequest.Crawl.Crawler.ActionManager, crawlRequest.Crawl.Crawler.ConsoleManager, crawlRequest.Crawl.Crawler.CookieManager, crawlRequest.Crawl.Crawler.CrawlRequestManager, crawlRequest.Crawl.Crawler.DataTypeManager, crawlRequest.Crawl.Crawler.DiscoveryManager, crawlRequest.Crawl.Crawler.EncodingManager, crawlRequest.Crawl.Crawler.HtmlManager, crawlRequest.Crawl.Crawler.PolitenessManager, crawlRequest.Crawl.Crawler.ProxyManager, crawlRequest.Crawl.Crawler.RuleManager, false);

                robotsDotTextRequest.Crawl = crawl;

                crawl.ProcessCrawlRequest(robotsDotTextRequest, false, false);

                crawlRequest.Politeness.DisallowedPathsSince = DateTime.Now;

                //The DataManager will not download the byte stream if ApplicationSettings.AssignFileAndImageDicoveries is set to false. This is by design.
                //So, if the crawl produced no data and no error, fetch the bytes directly.
                if (robotsDotTextRequest.Data != null && robotsDotTextRequest.Data.Length == 0 && robotsDotTextRequest.WebClient.WebException == null)
                {
                    robotsDotTextRequest.Data = robotsDotTextRequest.WebClient.DownloadHttpData(crawlRequest.Discovery.Uri.AbsoluteUri, robotsDotTextRequest.WebClient.HttpWebResponse.ContentEncoding.ToLowerInvariant() == "gzip", robotsDotTextRequest.WebClient.HttpWebResponse.ContentEncoding.ToLowerInvariant() == "deflate", crawlRequest.Crawl.Crawler.CookieContainer);
                }

                // Parse the downloaded source and cache crawl delay + disallowed paths.
                SiteCrawler.Value.RobotsDotText robotsDotText = _robotsDotTextManager.ParseRobotsDotTextSource(new Uri(crawlRequest.Discovery.Uri.Scheme + Uri.SchemeDelimiter + crawlRequest.Discovery.Uri.Host), robotsDotTextRequest.Data);

                crawlRequest.Politeness.CrawlDelayInMilliseconds = robotsDotText.CrawlDelay * 1000;
                crawlRequest.Politeness.DisallowedPaths = robotsDotText.DisallowedPaths;
            }

            if (crawlRequest.Politeness != null)
            {
                if (crawlRequest.Politeness.DisallowedPaths != null)
                {
                    // URL-decode both sides so percent-encoded URIs match the rules.
                    foreach (string disallowedPath in crawlRequest.Politeness.DisallowedPaths)
                    {
                        if (HttpUtility.UrlDecode(crawlRequest.Discovery.Uri.AbsoluteUri).StartsWith(HttpUtility.UrlDecode(disallowedPath)))
                        {
                            crawlRequest.IsDisallowedReason = "Prohibited by robots.txt.";

                            return(true);
                        }
                    }
                }
            }
        }
    }

    return(false);
}
/// <summary>
/// Manual smoke test for MultiKeywordFind with a sample paragraph and
/// keyword list; set a breakpoint on 'result' to inspect the output.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void Main(string[] args)
{
    // FIXED: locals renamed to camelCase per convention, and the unused
    // 'checkResultHere' copy of the result has been removed.
    SqlString paragraph = "word1 test inbetween test test five word2 word1 test inbetween should return word2 separator word1 inbetween test should return word2";
    SqlString keyWords = "word1,test,inbetween,word2";
    SqlInt16 maxWordCountBeetween = 4;
    SqlBoolean maintainOrder = true;

    var result = UserDefinedFunctions.MultiKeywordFind(paragraph, keyWords, maxWordCountBeetween, maintainOrder);
}
/// <summary>
/// Initializes a new data mart with a fresh SID, the default data-mart type
/// (1), unattended operation disabled, and empty related collections.
/// </summary>
public DataMart()
{
    SID = UserDefinedFunctions.NewGuid();
    DataMartTypeId = 1;
    UnattendedMode = UnattendedModeKind.NoUnattendedOperation;
    Routings = new HashSet<RequestRouting>();
    Groups = new HashSet<Group>();
    Projects = new HashSet<Project>();
    InstalledModels = new HashSet<DataMartInstalledModel>();
}
/// <summary>
/// Smoke-tests ReadUserProfileFromString: materializes the streamed result
/// into a list to force full enumeration.
/// NOTE(review): d1, d2 and d3 are fixture members declared elsewhere in this
/// class -- their contents are not visible from this block.
/// </summary>
public void TestMethod1()
{
    var ret = UserDefinedFunctions.ReadUserProfileFromString(
        d1,
        d2,
        new SqlBytes(GetByteArray(d3)) // d3 converted to raw bytes for the UDF
    )
    .Cast <object>().ToList()
    ;
}
/// <summary>
/// Processes a FilesRow after crawling: rebuilds a CrawlRequest around the
/// stored file, replays its stored response headers, re-determines its
/// DataType, re-runs file management, and fires post-request crawl actions.
/// </summary>
/// <param name="applicationSettings">The application settings.</param>
/// <param name="webSettings">The web settings.</param>
/// <param name="crawler">The crawler.</param>
/// <param name = "filesRow">The files row.</param>
/// <param name="webClient">The web client.</param>
/// <param name="cache">The cache.</param>
/// <param name="actionManager">The action manager.</param>
/// <param name="consoleManager">The console manager.</param>
/// <param name="crawlerPeerManager">The crawler peer manager.</param>
/// <param name="discoveryManager">The discovery manager.</param>
/// <param name = "fileManager">The file manager.</param>
/// <param name="memoryManager">The memory manager.</param>
/// <param name="ruleManager">The rule manager.</param>
/// <param name = "arachnodeDAO">The arachnode DAO.</param>
public static void ProcessFile(ApplicationSettings applicationSettings, WebSettings webSettings, Crawler <TArachnodeDAO> crawler, ArachnodeDataSet.FilesRow filesRow, WebClient <TArachnodeDAO> webClient, Cache <TArachnodeDAO> cache, ActionManager <TArachnodeDAO> actionManager, ConsoleManager <TArachnodeDAO> consoleManager, CrawlerPeerManager <TArachnodeDAO> crawlerPeerManager, DiscoveryManager <TArachnodeDAO> discoveryManager, FileManager <TArachnodeDAO> fileManager, MemoryManager <TArachnodeDAO> memoryManager, RuleManager <TArachnodeDAO> ruleManager, IArachnodeDAO arachnodeDAO)
{
    // Managers required to assemble a Crawl around the stored file.
    // (FIXED: stray duplicate ';' after the CookieManager construction removed;
    // duplicated 'fileManager' XML doc param deduplicated above.)
    CacheManager <TArachnodeDAO> cacheManager = new CacheManager <TArachnodeDAO>(applicationSettings, webSettings);
    CookieManager cookieManager = new CookieManager();
    CrawlRequestManager <TArachnodeDAO> crawlRequestManager = new CrawlRequestManager <TArachnodeDAO>(applicationSettings, webSettings, cache, consoleManager, discoveryManager);
    DataTypeManager <TArachnodeDAO> dataTypeManager = new DataTypeManager <TArachnodeDAO>(applicationSettings, webSettings);
    EncodingManager <TArachnodeDAO> encodingManager = new EncodingManager <TArachnodeDAO>(applicationSettings, webSettings);
    PolitenessManager <TArachnodeDAO> politenessManager = new PolitenessManager <TArachnodeDAO>(applicationSettings, webSettings, cache);
    ProxyManager <TArachnodeDAO> proxyManager = new ProxyManager <TArachnodeDAO>(applicationSettings, webSettings, consoleManager);
    HtmlManager <TArachnodeDAO> htmlManager = new HtmlManager <TArachnodeDAO>(applicationSettings, webSettings, discoveryManager);

    Crawl <TArachnodeDAO> crawl = new Crawl <TArachnodeDAO>(applicationSettings, webSettings, crawler, actionManager, consoleManager, cookieManager, crawlRequestManager, dataTypeManager, discoveryManager, encodingManager, htmlManager, politenessManager, proxyManager, ruleManager, true);

    //create a CrawlRequest as this is what the internals of SiteCrawler.dll expect to operate on...
    CrawlRequest <TArachnodeDAO> crawlRequest = new CrawlRequest <TArachnodeDAO>(new Discovery <TArachnodeDAO>(filesRow.AbsoluteUri), 1, UriClassificationType.Host, UriClassificationType.Host, 0, RenderType.None, RenderType.None);

    crawlRequest.Crawl = crawl;
    crawlRequest.Discovery.DiscoveryType = DiscoveryType.File;
    crawlRequest.Discovery.ID = filesRow.ID;
    crawlRequest.Data = filesRow.Source;
    crawlRequest.ProcessData = true;
    crawlRequest.WebClient = webClient;

    crawlRequest.WebClient.HttpWebResponse.Headers.Clear();

    //parse the ResponseHeaders from the FilesRow.ResponseHeaders string...
    foreach (string responseHeader in filesRow.ResponseHeaders.Split("\r\n".ToCharArray(), StringSplitOptions.RemoveEmptyEntries))
    {
        string[] responseHeaderSplit = responseHeader.Split(":".ToCharArray());

        string name = responseHeaderSplit[0];
        string value = UserDefinedFunctions.ExtractResponseHeader(filesRow.ResponseHeaders, name, true).Value;

        crawlRequest.WebClient.HttpWebResponse.Headers.Add(name, value);
    }

    //refresh the DataTypes in the DataTypeManager... (if necessary)...
    if (dataTypeManager.AllowedDataTypes.Count == 0)
    {
        dataTypeManager.RefreshDataTypes();
    }

    crawlRequest.DataType = dataTypeManager.DetermineDataType(crawlRequest);

    if (applicationSettings.InsertFiles)
    {
        crawlRequest.Discovery.ID = arachnodeDAO.InsertFile(crawlRequest.Parent.Uri.AbsoluteUri, crawlRequest.Discovery.Uri.AbsoluteUri, crawlRequest.WebClient.HttpWebResponse.Headers.ToString(), applicationSettings.InsertFileSource ? crawlRequest.Data : new byte[] { }, crawlRequest.DataType.FullTextIndexType, applicationSettings.ClassifyAbsoluteUris);
    }

    crawlRequest.ManagedDiscovery = fileManager.ManageFile(crawlRequest, crawlRequest.Discovery.ID.Value, crawlRequest.Discovery.Uri.AbsoluteUri, crawlRequest.Data, crawlRequest.DataType.FullTextIndexType, applicationSettings.ExtractFileMetaData, applicationSettings.InsertFileMetaData, applicationSettings.SaveDiscoveredFilesToDisk);

    actionManager.PerformCrawlActions(crawlRequest, CrawlActionType.PostRequest, arachnodeDAO);

    discoveryManager.CloseAndDisposeManagedDiscovery(crawlRequest, arachnodeDAO);
}
/// <summary>
/// Gets the strength associated with the host of an absolute URI.
/// </summary>
/// <param name = "absoluteUri">The absolute URI.</param>
/// <returns>The stored strength for the host, or 0 when the host is unknown.</returns>
public override double? GetStrengthForHost(string absoluteUri)
{
    string host = UserDefinedFunctions.ExtractHost(absoluteUri).Value;

    double? strength;
    return _hyperLinks_MOST_POPULAR_HOSTS_BY_HOSTS.TryGetValue(host, out strength) ? strength : 0;
}
/// <summary>
/// Gets the priority associated with the host of an absolute URI.
/// </summary>
/// <param name = "absoluteUri">The absolute URI.</param>
/// <returns>The stored priority for the host, or 0 when the host is unknown.</returns>
public override double? GetPriorityForHost(string absoluteUri)
{
    string host = UserDefinedFunctions.ExtractHost(absoluteUri).Value;

    double? priority;
    return _priorities.TryGetValue(host, out priority) ? priority : 0;
}
/// <summary>
/// Suggests a known string for a possibly-misspelled input.  Returns the
/// input unchanged when Check(input) passes; otherwise looks up candidates by
/// fuzzy hash and ranks them by Levenshtein distance.  Returns null when no
/// candidate is found at all.
/// </summary>
/// <param name="input">The string to check and correct.</param>
public string Suggest(string input)
{
    if (!Check(input))
    {
        ulong hash = Strings.GenerateFuzzyHashCode(input);

        if (_hashesAndStrings.ContainsKey(hash))
        {
            // Exact fuzzy-hash bucket hit: rank the bucket's strings by edit
            // distance and return the closest.
            HashSet <string> hashSet = _hashesAndStrings[hash];

            Dictionary <string, int> sortedSuggestions = new Dictionary <string, int>();

            foreach (string suggestion in hashSet)
            {
                sortedSuggestions.Add(suggestion, UserDefinedFunctions.ComputeLevenstheinDistance(input, suggestion).Value);
            }

            return(sortedSuggestions.OrderBy(ss => ss.Value).First().Key);
        }
        else
        {
            // No exact bucket: scan buckets whose hash shares at least 75% of
            // the input hash's set bits, short-circuiting on distance <= 3.
            ulong numberOfSetBits = Numbers.NumberOfSetBits(hash);

            Dictionary <string, int> sortedSuggestions = new Dictionary <string, int>();

            foreach (KeyValuePair <ulong, HashSet <string> > keyValuePair in _hashesAndStrings)
            {
                if (Numbers.NumberOfSetBits(hash & keyValuePair.Key) >= numberOfSetBits * 0.75)
                {
                    foreach (string suggestion in keyValuePair.Value)
                    {
                        int levenstheinDistance = UserDefinedFunctions.ComputeLevenstheinDistance(input, suggestion).Value;

                        // NOTE(review): Dictionary.Add throws if the same
                        // suggestion string appears under two qualifying hash
                        // buckets -- confirm buckets are disjoint.
                        sortedSuggestions.Add(suggestion, levenstheinDistance);

                        // Close enough: accept immediately without ranking.
                        if (levenstheinDistance <= 3)
                        {
                            return(suggestion);
                        }
                    }
                }
            }

            if (sortedSuggestions.Count != 0)
            {
                return(sortedSuggestions.OrderBy(ss => ss.Value).First().Key);
            }
        }

        return(null);
    }

    // Input is already a known/valid string.
    return(input);
}
/// <summary>
/// Extracts the text between startString and endString (case-insensitively).
/// A '|' in either marker is tried as nothing, then an apostrophe, then a
/// double quote.  Returns "plainText|textWithEncapsulatedHyperLinks", or null
/// when the markers cannot be located.
/// </summary>
/// <param name="input">The text to search.</param>
/// <param name="startString">Start marker; '|' is a quote-style wildcard.</param>
/// <param name="endString">End marker; '|' is a quote-style wildcard.</param>
public static string ExtractInnerText(string input, string startString, string endString)
{
    string inputToLowerInvariant = input.ToLowerInvariant();

    startString = startString.ToLowerInvariant();
    // FIXED: endString was searched against the lower-cased input without
    // being lower-cased itself, so mixed-case end markers could never match.
    endString = endString.ToLowerInvariant();

    int rawIndex = inputToLowerInvariant.IndexOf(startString.Replace("|", ""));

    if (rawIndex == -1)
    {
        rawIndex = inputToLowerInvariant.IndexOf(startString.Replace("|", "'"));
    }

    if (rawIndex == -1)
    {
        rawIndex = inputToLowerInvariant.IndexOf(startString.Replace("|", "\""));
    }

    if (rawIndex != -1)
    {
        int rawIndexEnd = inputToLowerInvariant.IndexOf(endString.Replace("|", ""), rawIndex);

        // FIXED: the fallback searches below originally omitted the rawIndex
        // start offset, so they could locate an end marker BEFORE the start
        // marker, making (rawIndexEnd - rawIndex) negative and crashing Substring.
        if (rawIndexEnd == -1)
        {
            rawIndexEnd = inputToLowerInvariant.IndexOf(endString.Replace("|", "'"), rawIndex);
        }

        if (rawIndexEnd == -1)
        {
            rawIndexEnd = inputToLowerInvariant.IndexOf(endString.Replace("|", "\""), rawIndex);
        }

        if (rawIndexEnd != -1)
        {
            if (rawIndexEnd - rawIndex >= 1)
            {
                string raw = input.Substring(rawIndex, rawIndexEnd - rawIndex);
                string rawWithHyperLinks = raw;

                rawWithHyperLinks = EncapsulateHyperLinks(rawWithHyperLinks);

                // Strip markup via the UDF, then trim the temporary angle-bracket
                // wrappers (twice, as ExtractText may leave a nested pair).
                raw = UserDefinedFunctions.ExtractText("<" + raw + ">").Value.Trim().TrimStart('<').TrimEnd('>').Trim().TrimStart('<').TrimEnd('>');
                rawWithHyperLinks = UserDefinedFunctions.ExtractText("<" + rawWithHyperLinks + ">").Value.Trim().TrimStart('<').TrimEnd('>').Trim().TrimStart('<').TrimEnd('>');

                return(raw + "|" + rawWithHyperLinks);
            }
        }
    }

    return(null);
}
/// <summary>
/// Initializes a new request: stamps Created and Updated with the current
/// time, assigns a fresh SID, and creates all related collections empty.
/// </summary>
public Request()
{
    Updated = Created = DateTime.Now;
    SID = UserDefinedFunctions.NewGuid();
    Routings = new HashSet<RequestRouting>();
    SearchTerms = new HashSet<RequestSearchTerm>();
    RequestSearchResults = new HashSet<Request>();
    DataMartSearchResults = new HashSet<DataMart>();
    OrganizationSearchResults = new HashSet<Organization>();
    InResults = new HashSet<Request>();
    Folders = new HashSet<RequestSharedFolder>();
}
/// <summary>
/// Initializes a new organization with a fresh SID and empty related
/// collections.
/// </summary>
public Organization()
{
    SID = UserDefinedFunctions.NewGuid();
    Users = new HashSet<User>();
    Groups = new HashSet<Group>();
    DataMarts = new HashSet<DataMart>();
    Children = new HashSet<Organization>();
    SecurityGroups = new HashSet<OrganizationSecurityGroup>();
    Requests = new HashSet<Request>();
    Registries = new HashSet<OrganizationRegistry>();
    EHRSes = new HashSet<OrganizationEHRS>();
    InSearchResults = new HashSet<Request>();
}