public void TestSingleWord()
{
    var index = new WordIndex();
    Assert.That(index.GetAttributes(), Is.EquivalentTo(new string[0]));

    index.Add("w1", "attr2");
    index.Add("w1", "attr1");
    index.Add("w1", "attr3");
    Assert.Throws<ArgumentException>(() => index.Add("w1", "attr1"));

    Assert.That(index.GetAttributes(), Is.EquivalentTo(new string[] { "attr1", "attr2", "attr3" }));
    Assert.That(index.GetWordNames("attr1"), Is.EquivalentTo(new string[] { "w1" }));
    Assert.That(index.GetWordNames("attr2"), Is.EquivalentTo(new string[] { "w1" }));
    Assert.That(index.GetWordNames("attr3"), Is.EquivalentTo(new string[] { "w1" }));

    Assert.Throws<KeyNotFoundException>(() => index.Remove("wX", "attr1"));
    Assert.Throws<KeyNotFoundException>(() => index.Remove("w1", "attrX"));

    index.Remove("w1", "attr2");
    Assert.That(index.GetAttributes(), Is.EquivalentTo(new string[] { "attr1", "attr3" }));
    Assert.That(index.GetWordNames("attr1"), Is.EquivalentTo(new string[] { "w1" }));
    Assert.That(index.GetWordNames("attr2"), Is.EquivalentTo(new string[0]));
    Assert.That(index.GetWordNames("attr3"), Is.EquivalentTo(new string[] { "w1" }));

    index.Remove("w1", "attr1");
    index.Remove("w1", "attr3");
    Assert.That(index.GetAttributes(), Is.EquivalentTo(new string[0]));
    Assert.That(index.GetWordNames("attr1"), Is.EquivalentTo(new string[0]));
    Assert.That(index.GetWordNames("attr2"), Is.EquivalentTo(new string[0]));
    Assert.That(index.GetWordNames("attr3"), Is.EquivalentTo(new string[0]));
}
public void WordIndex_Basic()
{
    WordIndex index = new WordIndex(new DefaultWordSplitter());

    // Verify no matches initially
    Assert.AreEqual("", GetMatchExact(index, "anything"));

    // Set a value
    index.Index(1, "", "this is the original value!");

    // Verify words in the value match
    Assert.AreEqual("1", GetMatchExact(index, "this"));
    Assert.AreEqual("1", GetMatchExact(index, "value"));

    // Verify partial words don't match an exact search but do match a prefix search
    Assert.AreEqual("", GetMatchExact(index, "thi"));
    Assert.AreEqual("1", GetMatches(index, "thi"));

    // Change the value
    index.Index(1, "this is the original value!", "this is the updated value!");

    // Verify removed strings no longer match
    Assert.AreEqual("", GetMatches(index, "original"));

    // Verify maintained and added strings continue to match
    Assert.AreEqual("1", GetMatchExact(index, "value"));
    Assert.AreEqual("1", GetMatchExact(index, "updated"));

    // Index a few more items
    index.Index(2, "", "I have a different original value.");
    index.Index(3, "", "value, Yet a third");

    Assert.AreEqual("1, 2, 3", GetMatchExact(index, "value"));
    Assert.AreEqual("2", GetMatchExact(index, "original"));
}
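// The tests in this listing call a GetMatchExact helper that is not shown here.
// A minimal sketch, mirroring the GetMatches helper defined further below; the
// WhereMatchExact call is an assumed exact-match counterpart of WhereMatches,
// not necessarily the real method name.
private static string GetMatchExact(WordIndex index, string word)
{
    ShortSet results = new ShortSet(ushort.MaxValue);
    index.WhereMatchExact(word, results);   // assumed API: exact (non-prefix) lookup
    return String.Join(", ", results.Values);
}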
public List<ItemInfo> Query(string query)
{
    if (Index == null)
    {
        return new List<ItemInfo>();
    }

    if (!query.All(Char.IsLetter))
    {
        return PlainQuery(query, false);
    }

    var regex = new Regex("^" + Regex.Escape(query), RegexOptions.IgnoreCase | RegexOptions.Compiled);
    var result = AbbrIndex.Where(i => regex.IsMatch(i.Item1))
                 .Concat(WordIndex.Where(i => regex.IsMatch(i.Item1)))
                 .OrderBy(i => i.Item2)
                 .ThenBy(i => i.Item3.Name, MyPathComparer)
                 .ThenBy(i => i.Item3.Path, MyPathComparer)
                 .Select(i => i.Item3)
                 .ToList();

    if (result.Count > 0)
    {
        return result;
    }

    return PlainQuery(query, true);
}
public void TestUpdate()
{
    var index = new WordIndex();

    index.Update(null, new List<string>(), "w1", new List<string> { "attr1" });
    Assert.That(index.GetAttributes(), Is.EquivalentTo(new string[] { "attr1" }));
    Assert.That(index.GetWordNames("attr1"), Is.EquivalentTo(new string[] { "w1" }));

    index.Update("w1", new List<string> { "attr1" }, "w1", new List<string> { "attr2", "attr3" });
    Assert.That(index.GetAttributes(), Is.EquivalentTo(new string[] { "attr2", "attr3" }));
    Assert.That(index.GetWordNames("attr1"), Is.EquivalentTo(new string[0]));
    Assert.That(index.GetWordNames("attr2"), Is.EquivalentTo(new string[] { "w1" }));
    Assert.That(index.GetWordNames("attr3"), Is.EquivalentTo(new string[] { "w1" }));

    index.Update("w1", new List<string> { "attr2", "attr3" }, "w2", new List<string> { "attr3", "attr1" });
    Assert.That(index.GetAttributes(), Is.EquivalentTo(new string[] { "attr1", "attr3" }));
    Assert.That(index.GetWordNames("attr1"), Is.EquivalentTo(new string[] { "w2" }));
    Assert.That(index.GetWordNames("attr2"), Is.EquivalentTo(new string[0]));
    Assert.That(index.GetWordNames("attr3"), Is.EquivalentTo(new string[] { "w2" }));
}
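// TestSingleWord and TestUpdate above exercise a word/attribute flavor of WordIndex
// whose implementation is not shown. The following is only a sketch consistent with
// those assertions; the backing dictionary and exception messages are assumptions.
using System;
using System.Collections.Generic;

public class WordIndex
{
    // attribute -> set of words carrying that attribute
    private readonly Dictionary<string, HashSet<string>> _wordsByAttribute =
        new Dictionary<string, HashSet<string>>();

    public void Add(string word, string attribute)
    {
        HashSet<string> words;
        if (!_wordsByAttribute.TryGetValue(attribute, out words))
        {
            _wordsByAttribute[attribute] = words = new HashSet<string>();
        }

        // The tests expect a duplicate (word, attribute) pair to be rejected.
        if (!words.Add(word))
        {
            throw new ArgumentException("Pair is already indexed.");
        }
    }

    public void Remove(string word, string attribute)
    {
        // Unknown attribute: the dictionary indexer throws KeyNotFoundException,
        // matching the tests; an unknown word is reported the same way.
        HashSet<string> words = _wordsByAttribute[attribute];
        if (!words.Remove(word))
        {
            throw new KeyNotFoundException(word);
        }

        // Attributes with no remaining words disappear from GetAttributes().
        if (words.Count == 0)
        {
            _wordsByAttribute.Remove(attribute);
        }
    }

    public void Update(string oldWord, IEnumerable<string> oldAttributes,
                       string newWord, IEnumerable<string> newAttributes)
    {
        foreach (string attribute in oldAttributes) { Remove(oldWord, attribute); }
        foreach (string attribute in newAttributes) { Add(newWord, attribute); }
    }

    public IEnumerable<string> GetAttributes()
    {
        return _wordsByAttribute.Keys;
    }

    public IEnumerable<string> GetWordNames(string attribute)
    {
        HashSet<string> words;
        return _wordsByAttribute.TryGetValue(attribute, out words)
            ? (IEnumerable<string>)words
            : new string[0];
    }
}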
private static string GetMatches(WordIndex index, string word)
{
    ShortSet results = new ShortSet(ushort.MaxValue);
    index.WhereMatches(word, results);
    return String.Join(", ", results.Values);
}
private void checkRow(Tile[] row)
{
    // Build a string from the row's tiles, using a space for empty cells
    string rowString = "";
    for (int i = 0; i < row.Length; i++)
    {
        if (row[i] != null)
        {
            rowString += row[i].letter;
        }
        else
        {
            rowString += " ";
        }
    }

    //Debug.Log("Checking rowString: '" + rowString + "'");
    WordIndex wordIndex = checkRowString(0, tileCount, rowString);
    //Debug.Log("Found Word: '" + rowString.Substring(wordIndex.start, wordIndex.length) + "'");

    // Destroy the tiles that make up the found word
    for (int i = wordIndex.start, j = 0; j < wordIndex.length; i++, j++)
    {
        Destroy(row[i].gameObject);
    }
}
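// checkRowString above returns a WordIndex value that is not defined in this listing;
// from the usage it only needs to describe where the found word sits in the row
// string. A sketch of that assumed shape:
public struct WordIndex
{
    public int start;   // index of the word's first character in the row string
    public int length;  // number of characters in the found word
}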
/// <summary>
/// Compares the current instance with another instance.
/// </summary>
/// <param name="other">The other instance.</param>
/// <returns>The comparison result.</returns>
/// <remarks><b>The First Char Index does not participate in the comparison.</b></remarks>
public int CompareTo(BasicWordInfo other)
{
    if (other == null)
    {
        return 1;
    }

    int res = Location.CompareTo(other.Location) * 2;
    return res + WordIndex.CompareTo(other.WordIndex);
}
private static string GetIndexData(WordIndex index)
{
    Dictionary<string, List<ushort>> d = index.ConvertToDictionary();

    StringBuilder result = new StringBuilder();
    foreach (string word in d.Keys)
    {
        result.AppendLine(String.Format("{0}: [{1}]", word, String.Join(", ", d[word])));
    }

    return result.ToString();
}
private static void Main(string[] args)
{
    var wordIndex = new WordIndex();
    var readWords = ReadWords(new List<string>())
                    .SelectMany(list => list.Where(s => s.Length > 3))
                    .Take(10 * 1000 * 1000)
                    .ToArray();

    int j1 = 0;
    foreach (var source in readWords.Take(10))
    {
        Console.WriteLine(j1++ + " " + source);
    }

    wordIndex.InsertPortion(readWords.Select((s, i) => new Tuple<int, string>(i, s)));
    Console.WriteLine(GC.GetTotalMemory(false) / 1024 / 1024);
    Console.WriteLine();

    // For a sample of words, query every suffix of length >= 4 and verify the index
    // against a brute-force scan: print false positives, fail on missed matches.
    foreach (var word in readWords.Skip(10).Take(10))
    {
        for (int j = 0; j < word.Length - 3; j++)
        {
            string subWord = word.Substring(j);
            var res = wordIndex.FindBySubWord(subWord).ToArray();
            for (int i = 0; i < readWords.Length; i++)
            {
                if (res.Contains(i))
                {
                    if (!readWords[i].Contains(subWord))
                    {
                        Console.WriteLine(readWords[i]);
                    }
                }
                else
                {
                    if (readWords[i].Contains(subWord))
                    {
                        throw new Exception("missing");
                    }
                }
            }
        }
    }
}
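// InsertPortion and FindBySubWord are not shown in this listing; the triple-store
// example further below describes the same class as a "words trigrams index", so
// this sketch records each id under every trigram of its word and answers a
// sub-word query by intersecting posting sets. It can return false positives
// (all trigrams present but not contiguous), which is exactly what the verification
// loop above tolerates; the field names and storage layout are assumptions.
using System;
using System.Collections.Generic;
using System.Linq;

public class WordIndex
{
    private readonly Dictionary<string, HashSet<int>> _idsByTrigram =
        new Dictionary<string, HashSet<int>>();

    public void Insert(string word, int id)
    {
        for (int i = 0; i + 3 <= word.Length; i++)
        {
            string trigram = word.Substring(i, 3);
            HashSet<int> ids;
            if (!_idsByTrigram.TryGetValue(trigram, out ids))
            {
                _idsByTrigram[trigram] = ids = new HashSet<int>();
            }
            ids.Add(id);
        }
    }

    public void InsertPortion(IEnumerable<Tuple<int, string>> portion)
    {
        foreach (var pair in portion) { Insert(pair.Item2, pair.Item1); }
    }

    public IEnumerable<int> FindBySubWord(string subWord)
    {
        HashSet<int> candidates = null;
        for (int i = 0; i + 3 <= subWord.Length; i++)
        {
            HashSet<int> ids;
            if (!_idsByTrigram.TryGetValue(subWord.Substring(i, 3), out ids))
            {
                return Enumerable.Empty<int>();
            }

            if (candidates == null) { candidates = new HashSet<int>(ids); }
            else { candidates.IntersectWith(ids); }
        }

        return candidates ?? Enumerable.Empty<int>();
    }
}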
/// <summary>
/// Compares the current instance with another instance.
/// </summary>
/// <param name="other">The other instance.</param>
/// <returns>The comparison result.</returns>
/// <remarks><b>The First Char Index does not participate in the comparison.</b></remarks>
public int CompareTo(WordInfo other)
{
    if (other == null)
    {
        return 1;
    }

    // text has a distance module of 1
    // location has a distance module of 2
    // wordIndex has a distance module of 3
    int res  = Location.CompareTo(other.Location) * 2;
    int res2 = WordIndex.CompareTo(other.WordIndex) * 3;
    return res + res2 + Text.CompareTo(other.Text);
}
public void WordIndex_ValueRequiresSplit()
{
    WordIndex index = new WordIndex(new DefaultWordSplitter());

    // Verify no matches initially
    Assert.AreEqual("", GetMatches(index, "will be split"));

    // Set a value
    index.Index(1, "", "this value will be split");

    // Verify the item matches (the search term must be split)
    Assert.AreEqual("1", GetMatches(index, "will be split"));

    // Verify the item only matches if all terms are found
    Assert.AreEqual("", GetMatches(index, "will be split also"));
}
public static void Main(String[] args)
{
    WordIndex wi = new WordIndex();
    wi.Add("ja", 2);
    wi.Add("nej", 4);
    wi.Add("ja", 7);

    foreach (string s in wi.Keys)
    {
        Console.WriteLine(s + " -->");
        foreach (int line in wi[s])
        {
            Console.WriteLine(" " + line);
        }
    }
}
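// The WordIndex used in this Main maps a word to the line numbers it occurs on
// (wi.Add("ja", 2), wi.Keys, wi[s]). A minimal sketch that supports that usage;
// deriving from SortedDictionary is an assumption that also gives alphabetically
// ordered Keys for the printout.
using System.Collections.Generic;

public class WordIndex : SortedDictionary<string, List<int>>
{
    public void Add(string word, int lineNumber)
    {
        List<int> lines;
        if (!TryGetValue(word, out lines))
        {
            this[word] = lines = new List<int>();
        }
        lines.Add(lineNumber);
    }
}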
public Dictionary(int maxWordLength)
{
    _words = new List<string>[maxWordLength + 1];
    for (int i = 1; i <= maxWordLength; i++)
    {
        _words[i] = new List<string>();
    }

    _indexes = new WordIndex[maxWordLength + 1];
    for (int i = 1; i <= maxWordLength; i++)
    {
        _indexes[i] = new WordIndex(i);
    }

    _filter = new WordFilter(1, maxWordLength);
    _description = new Dictionary<string, string>();
}
public void WordIndex_MultipleBlocks()
{
    WordIndex index = new WordIndex(new DefaultWordSplitter());

    for (int i = 0; i < 70000; ++i)
    {
        string word = "Word" + i.ToString();
        index.AddWord(1, word);

        if (i % 5 == 0)
        {
            index.AddWord(2, word);
        }
    }

    Assert.AreEqual("1", GetMatchExact(index, "Word6"));
    Assert.AreEqual("1, 2", GetMatchExact(index, "Word5"));
    Assert.AreEqual("1, 2", GetMatchExact(index, "Word69000"));
}
public void WordIndex_Serialization()
{
    WordIndex index = new WordIndex(new DefaultWordSplitter());

    // Set a value
    index.Index(1, "", "this is the original value!");

    using (SerializationContext context = new SerializationContext(new MemoryStream()))
    {
        index.WriteBinary(context);
        context.Stream.Seek(0, SeekOrigin.Begin);

        WordIndex index2 = new WordIndex(new DefaultWordSplitter());
        index2.ReadBinary(context);

        // Verify the deserialized copy finds the value after the round trip
        Assert.AreEqual("1", GetMatchExact(index2, "value"));
    }
}
public void WordIndex_ResizeAndRemove()
{
    WordIndex index = new WordIndex(new DefaultWordSplitter());

    // Verify index totally empty to start
    Assert.AreEqual("", GetIndexData(index));

    // Add words so that we have one with all items, two with half, and several unique words
    index.Index(0, "", "one sample original sample");
    index.Index(1, "", "two sample original sample");
    index.Index(2, "", "three sample original");
    index.Index(3, "", "four other original");
    index.Index(4, "", "five other original");
    index.Index(5, "", "six other original");

    Assert.AreEqual("0, 1, 2, 3, 4, 5", GetMatchExact(index, "original"));
    Assert.AreEqual("0, 1, 2", GetMatchExact(index, "sample"));
    Assert.AreEqual("0", GetMatchExact(index, "one"));

    // Reindex item 0: remove its unique word and 'sample', add 'three' and 'other', keep 'original'
    index.Index(0, "one sample original sample", "three other original");
    Assert.AreEqual("", GetMatchExact(index, "one"));
    Assert.AreEqual("0, 1, 2, 3, 4, 5", GetMatchExact(index, "original"));
    Assert.AreEqual("0, 3, 4, 5", GetMatchExact(index, "other"));
    Assert.AreEqual("1, 2", GetMatchExact(index, "sample"));

    // Remove all other values for 'sample', verify removed
    index.Index(1, "two sample original sample", "two other original");
    index.Index(2, "three sample original", "three other original");
    Assert.AreEqual("0, 1, 2, 3, 4, 5", GetMatchExact(index, "original"));
    Assert.AreEqual("", GetMatchExact(index, "sample"));
    Assert.AreEqual("0, 1, 2, 3, 4, 5", GetMatchExact(index, "other"));

    // Clear values. Verify index empties
    index.Index(0, "three other original", "");
    index.Index(1, "two other original", "");
    index.Index(2, "three other original", "");
    index.Index(3, "four other original", "");
    index.Index(4, "five other original", "");
    index.Index(5, "six other original", "");
    Assert.AreEqual("", GetIndexData(index));
}
public static WordIndex BuildIndexFromFiles(IEnumerable<FileInfo> wordFiles, string rootFolderPath)
{
    var index = new WordIndex();

    // Treat every printable character that is not a word character as a separator, plus tab
    var wordSeparators = Enumerable.Range(32, 127)
                         .Select(e => Convert.ToChar(e))
                         .Where(c => !WordChars.Contains(c))
                         .ToArray();
    wordSeparators = wordSeparators.Concat(new[] { '\t' }).ToArray(); // add tab

    foreach (var file in wordFiles)
    {
        var lineNumber = 1;
        var displayFileName = file.FullName.Replace(rootFolderPath, string.Empty);

        using (var reader = file.OpenText())
        {
            while (!reader.EndOfStream)
            {
                var words = reader
                            .ReadLine()
                            .Split(wordSeparators, StringSplitOptions.RemoveEmptyEntries);
                            ////.Select(f => f.Trim());

                var wordIndex = 1;
                foreach (var word in words)
                {
                    if (word.Length >= MinumumWordLength)
                    {
                        index.AddWordOccurrence(word, displayFileName, file.FullName, lineNumber, wordIndex++);
                    }
                }

                lineNumber++;
            }
        }
    }

    return index;
}
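// A possible call site for BuildIndexFromFiles, assuming the text files of interest
// sit under a single root folder; the path and file pattern are illustrative only.
var rootFolderPath = @"C:\docs";
var files = new DirectoryInfo(rootFolderPath).EnumerateFiles("*.txt", SearchOption.AllDirectories);
WordIndex index = BuildIndexFromFiles(files, rootFolderPath);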
private static void CalcStats(Bible bible, WordIndex idx)
{
    Int32 maxVerseSize = 0;

    // Get the words
    s_CSwords    = idx.CaseSensitiveWords.GetAllWords();
    s_CSwordsRev = idx.CaseSensitiveWords.GetAllWordsRev(s_CSwords);
    s_CIwords    = idx.CaseInsensitiveWords.GetAllWords();
    s_CIwordsRev = idx.CaseInsensitiveWords.GetAllWordsRev(s_CIwords);

    using (StreamWriter wr = System.IO.File.CreateText("c:\\words.txt"))
    {
        foreach (WordObject w in s_CIwords)
        {
            wr.WriteLine(w.Word);
        }
    }

    // Calc word data size & verse ref elements for Case Sensitive words
    s_CSwordDataSize = 0;
    s_CSverseRefElements = 0;
    foreach (WordObject t in WordsCaseSensitive)
    {
        s_CSwordDataSize += t.Word.Length;
        s_CSverseRefElements += t.VerseRefCount;
    }

    // Calc word data size & verse ref elements for Case Insensitive words
    s_CIwordDataSize = 0;
    s_CIverseRefElements = 0;
    foreach (WordObject t in WordsCaseInsensitive)
    {
        s_CIwordDataSize += t.Word.Length;
        s_CIverseRefElements += t.VerseRefCount;
    }

    // Calc num chapters, num verses, and verse size
    s_chapIndexElements = 0;
    s_verseIndexElements = 0;
    s_verseDataSize = 0;
    for (Int32 i = 0; i < bible.Books.Count; ++i)
    {
        Book book = bible.Books[(BookName) i];
        s_chapIndexElements += book.Chapters.Count;

        for (Int32 j = 0; j < book.Chapters.Count; ++j)
        {
            Chapter ch = book.Chapters[j];
            s_verseIndexElements += ch.Verses.Count;

            if (ch.Preface != null)
            {
                ++s_verseIndexElements;
                ch.Preface.ProcessItalicIndex(s_italicPosIndex);
                ch.Preface.ProcessWordPosIndex(s_wordPosIndex);
                maxVerseSize = Math.Max(maxVerseSize, ch.Preface.NonVerseData.Length);
                s_verseDataSize += ch.Preface.NonVerseData.Length;
            }

            for (Int32 k = 0; k < ch.Verses.Count; ++k)
            {
                Verse v = ch.Verses[k];
                v.ProcessItalicIndex(s_italicPosIndex);
                v.ProcessWordPosIndex(s_wordPosIndex);
                maxVerseSize = Math.Max(maxVerseSize, v.VerseData.Length);
                s_verseDataSize += v.VerseData.Length;
            }
        }

        if (book.Postscript != null)
        {
            ++s_verseIndexElements;
            book.Postscript.ProcessItalicIndex(s_italicPosIndex);
            book.Postscript.ProcessWordPosIndex(s_wordPosIndex);
            maxVerseSize = Math.Max(maxVerseSize, book.Postscript.NonVerseData.Length);
            s_verseDataSize += book.Postscript.NonVerseData.Length;
        }
    }

    // Calc extra markup size
    s_extraMarkupSize = checked((Int32) bible.BibleMarkup.Length);

    Console.WriteLine("Extra markup size: {0}", s_extraMarkupSize);
    Console.WriteLine("Max verse data size: {0}", maxVerseSize);
    Console.WriteLine("Max words per verse: {0}", s_wordPosIndex.MaxWordsPerVerse);
    Console.WriteLine("Max italics per verse: {0}", s_italicPosIndex.MaxItalicsPerVerse);
}
public static void Write(Bible bible, WordIndex idx, BinaryWriter wr)
{
    Console.WriteLine("Writing bible...");
    CalcStats(bible, idx);

    var header = new HeaderWriter();
    WritePos(wr, "Header Begin");
    header.WriteHeader(wr);

    WritePos(wr, "Case Sensitive Begin");
    WriteWordIndex(WordsCaseSensitive, WordsRevCaseSensitive, wr);
    WritePos(wr, " Word Data");
    WriteWordData(WordsCaseSensitive, wr);
    WritePos(wr, " Verse Refs");
    WriteVerseRefs(WordsCaseSensitive, wr);

    WritePos(wr, "Case Insensitive Begin");
    WriteWordIndex(WordsCaseInsensitive, WordsRevCaseInsensitive, wr);
    WritePos(wr, " Word Data");
    WriteWordData(WordsCaseInsensitive, wr);
    WritePos(wr, " Verse Refs");
    WriteVerseRefs(WordsCaseInsensitive, wr);

    WritePos(wr, "Book Index Begin");
    WriteBookIndex(bible, wr);
    WritePos(wr, "Chapter Index Begin");
    WriteChapIndex(bible, wr);
    WritePos(wr, "Verse Index Begin");
    WriteVerseIndex(bible, wr);
    WritePos(wr, "Verse Data Begin");
    WriteVerseData(bible, wr);
    WritePos(wr, "Extra Markup Begin");
    WriteExtraMarkup(bible, wr);
    WritePos(wr, "Word Pos Index Begin");
    WordPosIndex.Write(wr);
    WritePos(wr, "Italic Pos Index Begin");
    ItalicsPosIndex.Write(wr);
    WritePos(wr, "End of File");
}
// Word Index routines
private void ProcessWordIndex()
{
    _wordIndex = new WordIndex(_bible);
    _wordIndex.ProcessBible();
    _wordIndex.Shrink();
}
void DisplayWordList()
{
    var center_word = new GUIStyle(GUI.skin.label) { alignment = TextAnchor.MiddleCenter };

    EditorGUILayout.BeginVertical(GUILayout.Width(250));
    EditorGUILayout.Space();

    EditorGUILayout.BeginHorizontal(GUILayout.ExpandWidth(true));
    search_word = EditorGUILayout.TextField(editor_contents[10], search_word, GUILayout.ExpandWidth(true));
    search_word = Regex.Replace(search_word, alphaOnly, "");
    if (GUILayout.Button(editor_contents[11], GUILayout.Width(25)))
    {
        curr_search = search_word = string.Empty;
        GUI.FocusControl(null);
    }
    EditorGUILayout.EndHorizontal();

    scrollPos = EditorGUILayout.BeginScrollView(scrollPos, "box", GUILayout.ExpandHeight(true));
    if (search_word.Length > 0 &&
        (search_dirty || curr_search.Length != search_word.Length || string.Compare(curr_search, search_word, true) != 0))
    {
        curr_search = search_word;
        if (curr_search.Length == 1)
        {
            ShowAlphaBetList(char.ToLower(curr_search[0]), center_word);
        }
        else if (curr_search.Length > 1)
        {
            db.SearchWords(curr_search, ref search_result);
            if (search_result.Count > 0)
            {
                for (int i = 0; i < search_result.Count; ++i)
                {
                    var curr_word = db[search_result[i].Left][search_result[i].Right].word;
                    EditorGUILayout.BeginHorizontal();
                    if (GUILayout.Button(curr_word, GUILayout.ExpandWidth(true)))
                    {
                        selected_word = new WordIndex(search_result[i].Left, search_result[i].Right);
                        newWord = db[selected_word.group, selected_word.index].word;
                        newHint = db[selected_word.group, selected_word.index].hint;
                        state = State.EDIT;
                    }
                    if (GUILayout.Button("-", GUILayout.Width(25)))
                    {
                        db.RemoveWord(search_result[i].Left, search_result[i].Right);
                        EditorUtility.SetDirty(db);
                        BlankInterface();
                        search_dirty = true;
                        return;
                    }
                    EditorGUILayout.EndHorizontal();
                }
            }
            else
            {
                EditorGUILayout.BeginVertical();
                EditorGUILayout.LabelField("No words found starting with " + curr_search, center_word);
                if (state != State.ADD)
                {
                    if (GUILayout.Button("Add " + curr_search))
                    {
                        newWord = curr_search;
                        newHint = string.Empty;
                        state = State.ADD;
                    }
                }
                EditorGUILayout.EndVertical();
            }
        }
        search_dirty = true;
    }
    else
    {
        for (char c = 'a'; c <= 'z'; ++c)
        {
            // Alphabet area (of current alphabet c)
            ShowAlphaBetList(c, center_word);
        }
    }
    EditorGUILayout.EndScrollView();

    EditorGUILayout.BeginHorizontal(GUILayout.ExpandWidth(true));
    EditorGUILayout.LabelField("Total words: " + db.Size.ToString());
    EditorGUILayout.Space();
    if (GUILayout.Button(editor_contents[2]))
    {
        state = State.ADD;
        newWord = newHint = string.Empty;
    }
    EditorGUILayout.EndHorizontal();

    EditorGUILayout.Space();
    words_per_page = EditorGUILayout.IntSlider(editor_contents[9], words_per_page, 1, 20, GUILayout.ExpandWidth(true));
    DisplayFileLoader();

    if (GUILayout.Button(editor_contents[5], GUILayout.ExpandWidth(true)))
    {
        if (EditorUtility.DisplayDialog("Confirm destructive action",
                                        "Are you sure you want to clear all words from the database? It cannot be undone.",
                                        "Yes", "No"))
        {
            db.ClearAllWords();
            BlankInterface();
            if (curr_search.Length > 0)
            {
                search_dirty = true;
            }
            EditorUtility.SetDirty(this);
            return;
        }
    }
    EditorGUILayout.Space();
    EditorGUILayout.EndVertical();
}
void ShowAlphaBetList(char c, GUIStyle style = null)
{
    var curr_list = db[c];
    if (curr_list.Count > 0)
    {
        show_alpha[c - 'a'] = EditorGUILayout.Foldout(show_alpha[c - 'a'],
            char.ToUpper(c).ToString() + " (" + curr_list.Count.ToString() + " word" + ((curr_list.Count == 1) ? "" : "s") + ")");

        if (show_alpha[c - 'a'])
        {
            // word list area (of words starting with current alphabet c)
            EditorGUILayout.BeginHorizontal();
            if (show_page[c - 'a'] > 0)
            {
                // enable prev page button
                if (GUILayout.Button(editor_contents[7], GUILayout.Width(25)))
                {
                    show_page[c - 'a'] = show_page[c - 'a'] - 1;
                }
            }
            else
            {
                GUI.enabled = false;
                GUILayout.Button(editor_contents[7], "button", GUILayout.Width(25));
                GUI.enabled = true;
            }

            if (style != null)
            {
                EditorGUILayout.LabelField("Page " + (show_page[c - 'a'] + 1).ToString(), style, GUILayout.ExpandWidth(true));
            }
            else
            {
                EditorGUILayout.LabelField("Page " + (show_page[c - 'a'] + 1).ToString(), GUILayout.ExpandWidth(true));
            }

            if ((show_page[c - 'a'] + 1) * words_per_page < curr_list.Count)
            {
                // enable next page button
                if (GUILayout.Button(editor_contents[8], GUILayout.Width(25)))
                {
                    show_page[c - 'a'] = show_page[c - 'a'] + 1;
                }
            }
            else
            {
                GUI.enabled = false;
                GUILayout.Button(editor_contents[8], "button", GUILayout.Width(25));
                GUI.enabled = true;
            }
            EditorGUILayout.EndHorizontal();

            // show only some words and let user flip through pages to prevent lag.
            for (int i = show_page[c - 'a'] * words_per_page;
                 i < ((show_page[c - 'a'] + 1) * words_per_page) && i < curr_list.Count;
                 ++i)
            {
                EditorGUILayout.BeginHorizontal();
                if (GUILayout.Button(curr_list[i].word, GUILayout.ExpandWidth(true)))
                {
                    selected_word = new WordIndex(c, i);
                    newWord = db[selected_word.group, selected_word.index].word;
                    newHint = db[selected_word.group, selected_word.index].hint;
                    state = State.EDIT;
                }
                if (GUILayout.Button("-", GUILayout.Width(25)))
                {
                    db.RemoveWord(c, i);
                    if (i > curr_list.Count - 1 && show_page[c - 'a'] > 0)
                    {
                        --show_page[c - 'a'];
                    }
                    EditorUtility.SetDirty(db);
                    BlankInterface();
                    return;
                }
                EditorGUILayout.EndHorizontal();
            }
            // end word list area
        }
    }
    else
    {
        show_alpha[c - 'a'] = false;
        // EditorGUILayout.LabelField(char.ToUpper(c).ToString());
    }
    // end alphabet area
}
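// The editor selections above are stored in a WordIndex whose definition is not shown;
// from new WordIndex(c, i) and the .group / .index reads it appears to pair the
// alphabet group with the word's position inside that group. A sketch of that
// assumed shape:
public struct WordIndex
{
    public char group;   // first letter of the word, i.e. which alphabet list it lives in
    public int index;    // position of the word within that list

    public WordIndex(char group, int index)
    {
        this.group = group;
        this.index = index;
    }
}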
//
// GET: /Result/
public ActionResult Index(string keyword)
{
    // If the keyword is null or empty, redirect to the home page
    if (keyword == null || keyword.Trim().Equals(""))
    {
        return RedirectToAction("Index", "Home");
    }

    // Instance model layer
    ResultModel resultModel = new ResultModel();

    // Instance view model for result
    ResultViewModel resultViewModel = new ResultViewModel();

    // Get entry by keyword
    WordIndex entry = resultModel.GetEntryByKeyword(keyword);

    // Property of search history
    bool isExistData = true;

    // Check whether an entry was found
    if (entry != null)
    {
        // Set view model
        resultViewModel.Entry = entry;

        // List results by part of speech
        if (entry.Synsets.Where(x => x.Category == "n").Count() > 0)
        {
            ViewBag.Noun = entry.Synsets.Where(x => x.Category == "n");
        }
        if (entry.Synsets.Where(x => x.Category == "v").Count() > 0)
        {
            ViewBag.Verb = entry.Synsets.Where(x => x.Category == "v");
        }

        isExistData = true;
    }
    else
    {
        isExistData = false;
    }

    // Add search history
    SearchHistoryModel searchHistoryModel = new SearchHistoryModel();
    searchHistoryModel.AddSearchHistory(keyword, isExistData);

    // Add user history
    if (Request.IsAuthenticated)
    {
        UserHistoryModel userHistoryModel = new UserHistoryModel();
        userHistoryModel.AddUserHistory(keyword);
    }

    //---------------------------------------
    // Store 5 recent keywords
    if (Session["Recent"] == null)
    {
        Session["Recent"] = new List<string>();
    }

    int countRecent = ((List<string>)Session["Recent"]).Count;
    if (((List<string>)Session["Recent"]).Contains(keyword))
    {
        ((List<string>)Session["Recent"]).Remove(keyword);
    }
    else if (countRecent >= 5)
    {
        ((List<string>)Session["Recent"]).RemoveAt(0);
    }
    ((List<string>)Session["Recent"]).Add(keyword);

    //((List<string>)Session["Recent"]).Reverse();
    //string[] arraySuggest = new string[((List<string>)Session["Recent"]).Count];
    //((List<string>)Session["Recent"]).CopyTo(arraySuggest);
    //ViewBag.Recent = arraySuggest;
    //((List<string>)Session["Recent"]).Reverse();
    //---------------------------------------

    // Page title: "Kết quả tra cứu cho ..." ("Search results for ...")
    ViewBag.Title = "Kết quả tra cứu cho '" + keyword + "'";

    // Add suggested terms
    EntriesModel entryModel = new EntriesModel();
    resultViewModel.SuggestTerm = entryModel.SuggestTerm(keyword);

    // Get questions
    QAModel qaModel = new QAModel();
    resultViewModel.Questions = qaModel.GetQuestionPaged(1, 10);

    return View(resultViewModel);
}
private static void Main(string[] args)
{
    string path = ProjectDirectory.GetProjectDirectory();
    if (!Directory.Exists(path + "../Databases/simple triple store"))
    {
        Directory.CreateDirectory(path + "../Databases/simple triple store");
    }

    Console.WriteLine("Start TestGenerator");
    TextWriter res = new StreamWriter(new FileStream(path + "res.txt", FileMode.Append, FileAccess.Write));
    XElement xcnf = XElement.Load(path + "tests.xml");
    XElement xcommon = XElement.Load(path + "../common.xml");
    xcommon.Add(xcnf);

    Random rnd;
    System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
    SimpleTripleStore.SimpleTripleStore simpleTripleStore =
        new SimpleTripleStore.SimpleTripleStore(path + "../Databases/simple triple store/", 1000 * 1000);

    foreach (XElement xprobe in xcnf.Elements())
    {
        ProbeFrame probe = new ProbeFrame(xprobe.AncestorsAndSelf().Attributes());
        int npersons = (int)probe.siz;

        if (probe.sol == "simpleTripleStore_load")
        {
            // Directory.Delete(path + "../Databases/simple triple store", true);
            sw.Restart();
            Polar.Data.Phototeka generator = new Polar.Data.Phototeka(npersons, 777777);
            simpleTripleStore.Build(generator.GenerateRDF());
            sw.Stop();
            Console.WriteLine("Load ok. Duration={0}", sw.ElapsedMilliseconds); // 10000: 14.9 sec.
            probe.ndx = sw.ElapsedMilliseconds;
            res.WriteLine(probe.ToCSV());
        }
        else if (probe.sol == "simpleTripleStore_SelectById")
        {
            rnd = new Random(777777777);
            sw.Restart();
            long sum = 0;
            for (int i = 0; i < probe.nte; i++)
            {
                int id = rnd.Next(0, (int)probe.siz - 1);
                sum += Convert.ToInt32(simpleTripleStore.GetDirects(id).FirstOrDefault(tuple => tuple.Item1 == "age").Item2);
            }
            sw.Stop();
            probe.tim = sw.ElapsedMilliseconds;
            probe.sum = sum;
            Console.WriteLine("SelectById ok. Duration={0}", sw.ElapsedMilliseconds); // 7
            res.WriteLine(probe.ToCSV());
        }
        else if (probe.sol == "simpleTripleStore_SearchByName")
        {
            rnd = new Random(777777777);
            sw.Restart();

            // Build a trigram word index over all person names
            WordIndex index = new WordIndex();
            for (int i = 0; i < probe.siz; i++)
            {
                var w = simpleTripleStore.GetObject(i, "name").First();
                index.Insert(w, i);
            }
            Console.WriteLine("build words trigrams index " + sw.ElapsedMilliseconds);
            Console.WriteLine("RAM used {0} mb.", GC.GetTotalMemory(false) / 1024 / 1024);

            sw.Restart();
            long sum = 0, sum2 = 0;
            for (int i = 0; i < probe.nte; i++)
            {
                int id = rnd.Next(0, (int)probe.siz - 1);
                string namePrefix = "Pupkin" + id / 10;
                sum += (int)index.FindBySubWord(namePrefix).Count();
            }
            Console.WriteLine(sum);
            Console.WriteLine(sum2);
            sw.Stop();
            probe.tim = sw.ElapsedMilliseconds;
            probe.sum = sum;
            Console.WriteLine("SearchByName ok. Duration={0}", sw.ElapsedMilliseconds); // 7
            res.WriteLine(probe.ToCSV());
        }
        else if (probe.sol == "simpleTripleStore_GetRelationByPerson")
        {
            rnd = new Random(777777777);
            sw.Restart();
            long sum = 0;
            for (int i = 0; i < probe.nte; i++)
            {
                int persId = rnd.Next(0, (int)probe.siz - 1);
                sum += simpleTripleStore.GetSubjects("reflected", persId.ToString())
                       .Select(refid => simpleTripleStore.GetObject(refid, "in_doc").First())
                       .Select(int.Parse)
                       .Select(photoId => simpleTripleStore.GetDirects(photoId))
                       .Count();
            }
            sw.Stop();
            probe.tim = sw.ElapsedMilliseconds;
            probe.sum = sum;
            Console.WriteLine("GetRelationByPerson ok. Duration={0}", sw.ElapsedMilliseconds); // 7
            res.WriteLine(probe.ToCSV());
        }
        else if (probe.sol == "unused")
        {
        }
    }

    res.Close();
}
public IndexedColumn(IColumn<ByteBlock> column, IWordSplitter splitter) : base(column)
{
    this.Splitter = splitter;
    _index = new WordIndex(splitter);
}